diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index 683ce3e9..d5f6e149 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -9,9 +9,9 @@ on: jobs: - build_docs: + docs: runs-on: ubuntu-latest - name: build_docs + name: Build Documentation steps: - name: Checkout @@ -32,9 +32,42 @@ jobs: run: | poetry run python -m nox -s build-docs - run_tests: + checks: runs-on: ubuntu-latest - needs: build_docs + name: Project Checks (Python-${{ matrix.python }}) + strategy: + fail-fast: false + matrix: + python: + - "3.8" + - "3.9" + - "3.10" + steps: + - name: Fetch sqlalchemy_exasol code from repository + uses: actions/checkout@v3 + + - name: Install via apt + run: sudo apt-get install unixodbc unixodbc-dev libboost-date-time-dev libboost-locale-dev libboost-system-dev + + - name: Setup Python ${{ matrix.python }} + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python }} + + - name: Install poetry + uses: abatilo/actions-poetry@v2.1.5 + with: + poetry-version: 1.2.1 + + - name: Install python project dependencies + run: poetry install + + - name: Project Checks (Python-${{ matrix.python }}) + run: poetry run nox -s check + + tests: + runs-on: ubuntu-latest + needs: [docs, checks] strategy: fail-fast: false matrix: @@ -49,79 +82,74 @@ jobs: - 7.1.9 - 7.0.18 - name: Run Tests (Python-${{ matrix.python }}, Connector-${{ matrix.connector }}, Exasol-${{ matrix.exasol_version }}) + name: Integration Tests (Python-${{ matrix.python }}, Connector-${{ matrix.connector }}, Exasol-${{ matrix.exasol_version }}) steps: - - name: Fetch sqlalchemy_exasol code from repository - uses: actions/checkout@v3 - - - name: Setup integration-test-docker-environment - uses: actions/setup-python@v4 + - name: Fetch sqlalchemy_exasol code from repository + uses: actions/checkout@v3 - - name: Install via apt - run: sudo apt-get install unixodbc unixodbc-dev libboost-date-time-dev libboost-locale-dev libboost-system-dev + - name: Install via apt + run: sudo apt-get install unixodbc unixodbc-dev libboost-date-time-dev libboost-locale-dev libboost-system-dev - - name: Setup Python ${{ matrix.python }} - uses: actions/setup-python@v4 - with: - python-version: ${{ matrix.python }} + - name: Setup Python ${{ matrix.python }} + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python }} - - name: Install poetry - uses: abatilo/actions-poetry@v2.1.5 - with: - poetry-version: 1.1.13 + - name: Install poetry + uses: abatilo/actions-poetry@v2.1.5 + with: + poetry-version: 1.2.1 - - name: Install python project dependencies - run: poetry install + - name: Install python project dependencies + run: poetry install - - name: Install python project dependencies including trubodbc - run: poetry install --extras "turbodbc" - if: ${{ matrix.connector == 'turbodbc' }} + - name: Install python project dependencies including turbodbc + run: poetry install --extras "turbodbc" + if: ${{ matrix.connector == 'turbodbc' }} - - name: Checkout test environment - run: git clone --depth 1 --branch ${ITDE_TAG} ${ITDE_URL} - working-directory: ..
+ env: + ITDE_URL: "https://github.com/exasol/integration-test-docker-environment.git" + ITDE_TAG: "0.11.0" - - name: Run Test for Python ${{ matrix.python }} using ${{ matrix.connector }} - run: poetry run nox -s "verify(connector='${{ matrix.connector }}', db_version='${{ matrix.exasol_version }}')" + - name: Run Test for Python ${{ matrix.python }} using ${{ matrix.connector }} + run: poetry run nox -s integration-tests -- --connector ${{ matrix.connector }} --db-version ${{ matrix.exasol_version }} upload_to_pypi: runs-on: ubuntu-latest - needs: run_tests + needs: tests + name: Build & Upload Package [PYPI] if: startsWith(github.event.ref, 'refs/tags') strategy: matrix: - python: [3.8] - - name: Build & Upload Package [PYPI] + python: [ 3.8 ] steps: + - name: Fetch sqlalchemy_exasol code from repository + uses: actions/checkout@v3 - - name: Fetch sqlalchemy_exasol code from repository - uses: actions/checkout@v3 - - - name: Fetch all tags - run: git fetch origin +refs/tags/*:refs/tags/* + - name: Fetch all tags + run: git fetch origin +refs/tags/*:refs/tags/* - - name: Setup Python ${{ matrix.python }} - uses: actions/setup-python@v4 - with: - python-version: ${{ matrix.python }} + - name: Setup Python ${{ matrix.python }} + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python }} - - name: Install poetry - uses: abatilo/actions-poetry@v2.1.5 - with: - poetry-version: 1.1.13 + - name: Install poetry + uses: abatilo/actions-poetry@v2.1.5 + with: + poetry-version: 1.2.1 - - name: Install python project dependencies - run: poetry install + - name: Install python project dependencies + run: poetry install - - name: Build and push package to PYPI - env: - POETRY_HTTP_BASIC_PYPI_USERNAME: "__token__" - POETRY_HTTP_BASIC_PYPI_PASSWORD: "${{ secrets.pypi_token }}" - run: poetry run nox -s release + - name: Build and push package to PYPI + env: + POETRY_HTTP_BASIC_PYPI_USERNAME: "__token__" + POETRY_HTTP_BASIC_PYPI_PASSWORD: "${{ secrets.pypi_token }}" + run: poetry run nox -s release diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 05974ca4..dea96347 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -13,9 +13,9 @@ on: jobs: - build_docs: + docs: runs-on: ubuntu-latest - name: build_docs + name: Build Documentation steps: - name: Checkout @@ -36,9 +36,42 @@ jobs: run: | poetry run python -m nox -s build-docs - run_tests: + checks: runs-on: ubuntu-latest - needs: build_docs + name: Project Checks (Python-${{ matrix.python }}) + strategy: + fail-fast: false + matrix: + python: + - "3.8" + - "3.9" + - "3.10" + steps: + - name: Fetch sqlalchemy_exasol code from repository + uses: actions/checkout@v3 + + - name: Install via apt + run: sudo apt-get install unixodbc unixodbc-dev libboost-date-time-dev libboost-locale-dev libboost-system-dev + + - name: Setup Python ${{ matrix.python }} + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python }} + + - name: Install poetry + uses: abatilo/actions-poetry@v2.1.5 + with: + poetry-version: 1.2.1 + + - name: Install python project dependencies + run: poetry install + + - name: Project Checks (Python-${{ matrix.python }}) + run: poetry run nox -s check + + tests: + runs-on: ubuntu-latest + needs: [docs, checks] strategy: fail-fast: false matrix: @@ -53,72 +86,71 @@ jobs: - 7.1.9 - 7.0.18 - name: Run Tests (Python-${{ matrix.python }}, Connector-${{ matrix.connector }}, Exasol-${{ matrix.exasol_version }}) + name: Integration Tests (Python-${{ matrix.python }}, Connector-${{ 
matrix.connector }}, Exasol-${{ matrix.exasol_version }}) steps: - - name: Fetch sqlalchemy_exasol code from repository - uses: actions/checkout@v3 + - name: Fetch sqlalchemy_exasol code from repository + uses: actions/checkout@v3 - - name: Install via apt - run: sudo apt-get install unixodbc unixodbc-dev libboost-date-time-dev libboost-locale-dev libboost-system-dev + - name: Install via apt + run: sudo apt-get install unixodbc unixodbc-dev libboost-date-time-dev libboost-locale-dev libboost-system-dev - - name: Setup Python ${{ matrix.python }} - uses: actions/setup-python@v4 - with: - python-version: ${{ matrix.python }} + - name: Setup Python ${{ matrix.python }} + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python }} - - name: Install poetry - uses: abatilo/actions-poetry@v2.1.5 - with: - poetry-version: 1.1.13 + - name: Install poetry + uses: abatilo/actions-poetry@v2.1.5 + with: + poetry-version: 1.2.1 - - name: Install python project dependencies - run: poetry install + - name: Install python project dependencies + run: poetry install - - name: Install python project dependencies including trubodbc - run: poetry install --extras "turbodbc" - if: ${{ matrix.connector == 'turbodbc' }} + - name: Install python project dependencies including turbodbc + run: poetry install --extras "turbodbc" + if: ${{ matrix.connector == 'turbodbc' }} - - name: Checkout test environment - run: git clone --depth 1 --branch ${ITDE_TAG} ${ITDE_URL} - working-directory: .. - env: - ITDE_URL: "https://github.com/exasol/integration-test-docker-environment.git" - ITDE_TAG: "0.11.0" + - name: Checkout test environment + run: git clone --depth 1 --branch ${ITDE_TAG} ${ITDE_URL} + working-directory: .. + env: + ITDE_URL: "https://github.com/exasol/integration-test-docker-environment.git" + ITDE_TAG: "0.11.0" - - name: Run Test for Python ${{ matrix.python }} using ${{ matrix.connector }} - run: poetry run nox -s "verify(connector='${{ matrix.connector }}', db_version='${{ matrix.exasol_version }}')" + - name: Run Test for Python ${{ matrix.python }} using ${{ matrix.connector }} + run: poetry run nox -s integration-tests -- --connector ${{ matrix.connector }} --db-version ${{ matrix.exasol_version }} build_package: runs-on: ubuntu-latest - needs: run_tests + name: Build Package + needs: tests strategy: matrix: - python: [3.8] - - name: Build Package + python: [ 3.8 ] steps: - - name: Fetch sqlalchemy_exasol code from repository - uses: actions/checkout@v3 + - name: Fetch sqlalchemy_exasol code from repository + uses: actions/checkout@v3 - - name: Fetch all tags - run: git fetch origin +refs/tags/*:refs/tags/* + - name: Fetch all tags + run: git fetch origin +refs/tags/*:refs/tags/* - - name: Setup Python ${{ matrix.python }} - uses: actions/setup-python@v4 - with: - python-version: ${{ matrix.python }} + - name: Setup Python ${{ matrix.python }} + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python }} - - name: Install poetry - uses: abatilo/actions-poetry@v2.1.5 - with: - poetry-version: 1.1.13 + - name: Install poetry + uses: abatilo/actions-poetry@v2.1.5 + with: + poetry-version: 1.1.13 - - name: Install python project dependencies - run: poetry install + - name: Install python project dependencies + run: poetry install - - name: Build sdist and wheel packages - run: poetry build + - name: Build sdist and wheel packages + run: poetry build diff --git a/.github/workflows/gh-pages.yaml b/.github/workflows/gh-pages.yaml index d6395c9a..2e4e3674 100644 ---
a/.github/workflows/gh-pages.yaml +++ b/.github/workflows/gh-pages.yaml @@ -23,7 +23,7 @@ jobs: - name: Install Dependencies run: | poetry install - - name: Build Documentations + - name: Build Documentation run: | poetry run python -m nox -s build-docs touch doc/build/.nojekyll diff --git a/.github/workflows/link-check.yaml b/.github/workflows/link-check.yaml index c3ad2dc1..f5d52233 100644 --- a/.github/workflows/link-check.yaml +++ b/.github/workflows/link-check.yaml @@ -31,10 +31,10 @@ jobs: - name: Install poetry uses: abatilo/actions-poetry@v2.1.5 with: - poetry-version: 1.1.13 + poetry-version: 1.2.1 - name: Install python project dependencies run: poetry install --no-root - name: Check documentation links ${{ matrix.python }} using ${{ matrix.connector }} - run: poetry run nox -s "check-links" + run: poetry run nox -s check-links diff --git a/changelog.d/20220810_120055_nicola.coretti__106_Support_Sqlalchemy_1_4.rst b/changelog.d/20220810_120055_nicola.coretti__106_Support_Sqlalchemy_1_4.rst new file mode 100644 index 00000000..c255b0f9 --- /dev/null +++ b/changelog.d/20220810_120055_nicola.coretti__106_Support_Sqlalchemy_1_4.rst @@ -0,0 +1,10 @@ +🔧 Changed +---------- +- Update SQLAlchemy dependency to >= 1.4 + + .. warning:: + + This may impact performance (see also `SQLAlchemy docs `_). + If you are not willing or able to pay those potential performance hits, you should wait until the `tracking-issue `_ + is resolved. + diff --git a/doc/developer_guide/integration_tests.rst b/doc/developer_guide/integration_tests.rst index cbc80520..abc8ba77 100644 --- a/doc/developer_guide/integration_tests.rst +++ b/doc/developer_guide/integration_tests.rst @@ -1,7 +1,49 @@ -Integration Test Setup -====================== +Integration Test +================ -Integration testing is done by GitHub Actions contained in this repository, which provide a CI/CD pipeline to test, build, and deploy sqlalchemy_exasol to Pypi. +The integration tests are located within `test/integration`. They are split into +two groups. + +#. The SQLAlchemy conformance test suite + + The sqlalchemy conformance test suite is provided and maintained by the sqlalchemy project and is intended to support third-party dialect developers. + For further details see also `README.dialects.rst `_. + +#. Our custom Exasol test suite + + The Exasol test suite consists of tests written and maintained by Exasol. + +.. note:: + + In order to reduce the likelihood of test side effects, the `sqlalchemy` and the `exasol` test suites + are executed in separate pytest test runs. + +.. attention:: + + The Exasol database performs implicit schema/context changes (open & close schema) + in certain scenarios. Keep this in mind when setting up (writing) tests. + + #. CREATE SCHEMA implicitly changes the CURRENT_SCHEMA context + + When you create a new schema, you implicitly open this new schema. + + #. DROP SCHEMA sometimes switches the context to <null> + + If the CURRENT_SCHEMA is dropped, an implicit context switch to <null> is done. + + For further details have a look at the `Exasol-Documentation `_. + + .. note:: + + Creating/using a new un-pooled connection protects against + this side effect (see the sketch below). + + +Setup +===== + +Integration testing is done automatically by GitHub Actions contained in this repository, which provide a CI/CD pipeline to test, build, and deploy sqlalchemy_exasol to PyPI. +All important tasks within the actions use **nox**, which can also be used locally (e.g. ``nox -s check``).
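The note above recommends un-pooled connections as a guard against these implicit context switches. A minimal sketch of what that looks like with SQLAlchemy follows (an editorial example, not part of this diff; the DSN mirrors the settings used by the test sessions in noxfile.py, and `NullPool` is SQLAlchemy's stock "no pooling" pool class):

```python
# Sketch: protect a test against Exasol's implicit schema/context switches by
# disabling connection pooling, so every connect() yields a fresh connection.
from sqlalchemy import create_engine, text
from sqlalchemy.pool import NullPool

# Assumed DSN, mirroring the integration test settings in this repository
# (pyodbc connector, ITDE test database on localhost:8888).
URI = (
    "exa+pyodbc://sys:exasol@localhost:8888"
    "/TEST?CONNECTIONLCALL=en_US.UTF-8&DRIVER=EXAODBC&SSLCertificate=SSL_VERIFY_NONE"
)

# With NullPool a CREATE/DROP SCHEMA issued on another connection cannot leak
# a changed CURRENT_SCHEMA into this test via pool reuse.
engine = create_engine(URI, poolclass=NullPool)

with engine.connect() as connection:
    print(connection.execute(text("SELECT CURRENT_SCHEMA")).fetchone())
```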
Two main workflows are used for this purpose: @@ -31,7 +73,7 @@ This is meant to be used as the Production workflow. It's located under: This workflow will be executed anytime there's a commit pushed to **master**, or whenever a **tag** (release) is pushed. It does all the same steps as the CI workflow with one additional step at the end: Upload the package to Pypi. This upload step only happens when a tag is pushed; it will not be executed when commits are done in master. -To run it just commit and push to master (*Optional:* push a tag in case you want Pypi upload) and watch the workflow run in: +To run it, just commit and push to master (*Optional:* push a tag in case you want PyPI upload) and watch the workflow run in: ``_ diff --git a/noxfile.py b/noxfile.py index 8a12cf4b..5194e5ad 100644 --- a/noxfile.py +++ b/noxfile.py @@ -1,14 +1,12 @@ from __future__ import annotations -import os +import argparse import sys import webbrowser from argparse import ArgumentParser -from contextlib import contextmanager from pathlib import Path from shutil import rmtree from tempfile import TemporaryDirectory -from textwrap import dedent PROJECT_ROOT = Path(__file__).parent # scripts path also contains administrative code/modules which are used by some nox targets @@ -17,11 +15,7 @@ DOC_BUILD = DOC / "build" sys.path.append(f"{SCRIPTS}") -from typing import ( - Iterable, - Iterator, - MutableMapping, -) +from typing import Iterator import nox from git import tags @@ -30,16 +24,20 @@ from links import urls as _urls from nox import Session from nox.sessions import SessionRunner -from pyodbc import ( - Connection, - connect, +from odbc import ( + odbcconfig, + transaction, ) +from pyodbc import connect from version_check import ( version_from_poetry, version_from_python_module, version_from_string, ) +# default actions to be run if nothing is explicitly specified with the -s option +nox.options.sessions = ["fix"] + class Settings: ITDE = PROJECT_ROOT / ".."
/ "integration-test-docker-environment" @@ -52,25 +50,6 @@ class Settings: DB_VERSIONS = ("7.1.9", "7.0.18") -# default actions to be run if nothing is explicitly specified with the -s option -nox.options.sessions = [ - f"verify(connector='{Settings.CONNECTORS[0]}', db_version='{Settings.DB_VERSIONS[0]}')" -] - -ODBCINST_INI_TEMPLATE = dedent( - """ - [ODBC] - #Trace = yes - #TraceFile =~/odbc.trace - - [EXAODBC] - #Driver location will be appended in build environment: - DRIVER={driver} - - """ -) - - def find_session_runner(session: Session, name: str) -> SessionRunner: """Helper function to find parameterized action by name""" for s, _ in session._runner.manifest.list_all_sessions(): @@ -79,44 +58,6 @@ def find_session_runner(session: Session, name: str) -> SessionRunner: session.error(f"Could not find a nox session by the name {name!r}") -def transaction(connection: Connection, sql_statements: Iterable[str]) -> None: - cur = connection.cursor() - for statement in sql_statements: - cur.execute(statement) - cur.commit() - cur.close() - - -@contextmanager -def environment(env_vars: dict[str, str]) -> Iterator[MutableMapping[str, str]]: - _env = os.environ.copy() - os.environ.update(env_vars) - yield os.environ - os.environ.clear() - os.environ.update(_env) - - -@contextmanager -def temporary_odbc_config(config: str) -> Iterator[Path]: - with TemporaryDirectory() as tmp_dir: - config_dir = Path(tmp_dir) / "odbcconfig" - config_dir.mkdir(exist_ok=True) - config_file = config_dir / "odbcinst.ini" - with open(config_file, "w") as f: - f.write(config) - yield config_file - - -@contextmanager -def odbcconfig() -> Iterator[tuple[Path, MutableMapping[str, str]]]: - with temporary_odbc_config( - ODBCINST_INI_TEMPLATE.format(driver=Settings.ODBC_DRIVER) - ) as cfg: - env_vars = {"ODBCSYSINI": f"{cfg.parent.resolve()}"} - with environment(env_vars) as env: - yield cfg, env - - def _python_files(path: Path) -> Iterator[Path]: files = filter(lambda path: "dist" not in path.parts, PROJECT_ROOT.glob("**/*.py")) files = filter(lambda path: ".eggs" not in path.parts, files) @@ -126,6 +67,8 @@ def _python_files(path: Path) -> Iterator[Path]: @nox.session(python=False) def fix(session: Session) -> None: + """Run all available formatters and code upgrade tools against the code base""" + def apply_pyupgrade_fixes(session: Session) -> None: files = [f"{path}" for path in _python_files(PROJECT_ROOT)] session.run( @@ -153,66 +96,8 @@ def apply_pyupgrade_fixes(session: Session) -> None: @nox.session(python=False) -def pyupgrade(session: Session) -> None: - files = [f"{path}" for path in _python_files(PROJECT_ROOT)] - session.run("poetry", "run", "python", "-m", "pyupgrade", "--py38-plus", *files) - - -@nox.session(name="code-format", python=False) -def code_format(session: Session) -> None: - session.run( - "poetry", - "run", - "python", - "-m", - "black", - "--check", - "--diff", - "--color", - f"{PROJECT_ROOT}", - ) - - -@nox.session(python=False) -def isort(session: Session) -> None: - session.run( - "poetry", "run", "python", "-m", "isort", "-v", "--check", f"{PROJECT_ROOT}" - ) - - -@nox.session(python=False) -def lint(session: Session) -> None: - session.run( - "poetry", - "run", - "python", - "-m", - "pylint", - f'{PROJECT_ROOT / "scripts"}', - f'{PROJECT_ROOT / "sqlalchemy_exasol"}', - ) - - -@nox.session(name="type-check", python=False) -def type_check(session: Session) -> None: - session.run( - "poetry", - "run", - "mypy", - "--strict", - "--show-error-codes", - "--pretty", - "--show-column-numbers", - 
"--show-error-context", - "--scripts-are-modules", - ) - - -@nox.session(python=False) -@nox.parametrize("db_version", Settings.DB_VERSIONS) -@nox.parametrize("connector", Settings.CONNECTORS) -def verify(session: Session, connector: str, db_version: str) -> None: - """Prepare and run all available tests""" +def check(session: Session) -> None: + """Run all available source code checks against the code base (typecheck, linters, formatters, etc.)""" def is_version_in_sync() -> bool: return ( @@ -230,20 +115,26 @@ def is_version_in_sync() -> bool: session.notify("code-format") session.notify("type-check") session.notify("lint") - session.notify("type-check") - session.notify(find_session_runner(session, f"db-start(db_version='{db_version}')")) - session.notify( - find_session_runner(session, f"integration(connector='{connector}')") - ) - session.notify(find_session_runner(session, "db-stop")) @nox.session(name="db-start", python=False) -@nox.parametrize("db_version", Settings.DB_VERSIONS) -def start_db(session: Session, db_version: str = Settings.DB_VERSIONS[0]) -> None: - """Start the test database""" +def start_db(session: Session) -> None: + """Start a test database. For more details append '-- -h'""" - def start() -> None: + def parser() -> ArgumentParser: + p = ArgumentParser( + usage="nox -s start-db -- [-h] [--db-version]", + formatter_class=argparse.ArgumentDefaultsHelpFormatter, + ) + p.add_argument( + "--db-version", + choices=Settings.DB_VERSIONS, + default=Settings.DB_VERSIONS[0], + help="which will be used", + ) + return p + + def start(db_version: str) -> None: # Consider adding ITDE as dev dependency once ITDE is packaged properly with session.chdir(Settings.ITDE): session.run( @@ -263,8 +154,9 @@ def start() -> None: external=True, ) + # TODO/FIXME: setup to populate in confest.py def populate() -> None: - with odbcconfig(): + with odbcconfig(Settings.ODBC_DRIVER): settings = { "driver": "EXAODBC", "server": "localhost:8888", @@ -287,12 +179,13 @@ def populate() -> None: connection, ( "CREATE SCHEMA TEST_SCHEMA;", - "CREATE SCHEMA TEST_SCHEMA_2;", + # "CREATE SCHEMA TEST_SCHEMA_2;", ), ) connection.close() - start() + args = parser().parse_args(session.posargs) + start(args.db_version) populate() @@ -303,89 +196,148 @@ def stop_db(session: Session) -> None: session.run("docker", "kill", "test_container_test", external=True) -@nox.session(python=False) -@nox.parametrize("connector", Settings.CONNECTORS) -def integration(session: Session, connector: str) -> None: - """Run(s) the integration tests for a specific connector. Expects a test database to be available.""" +@nox.session(name="sqla-tests", python=False) +def sqlalchemy_tests(session: Session) -> None: + """ + Run the sqlalchemy integration tests suite. For more details append '-- -h' - with odbcconfig() as (config, env): - uri = "".join( - [ - "exa+{connector}:", - "//sys:exasol@localhost:{db_port}", - "/TEST?CONNECTIONLCALL=en_US.UTF-8&DRIVER=EXAODBC&SSLCertificate=SSL_VERIFY_NONE", - ] - ).format(connector=connector, db_port=Settings.DB_PORT) - session.run("pytest", "--dropfirst", "--dburi", uri, external=True, env=env) + Attention: + Make sure the sqla compliance suite is run in isolation, to avoid side effects from custom tests + e.g. because of unintended implicit schema open/closes. 
-@nox.session(name="report-skipped", python=False) -def report_skipped(session: Session) -> None: + Expects a running test db """ - Runs all tests for all supported connectors and creates a csv report of skipped tests for each connector. - Attention: This task expects a running test database (db-start). - """ + def parser() -> ArgumentParser: + p = ArgumentParser( + usage="nox -s sqla-tests -- [-h] [--connector]", + formatter_class=argparse.ArgumentDefaultsHelpFormatter, + ) + p.add_argument( + "--connector", + choices=Settings.CONNECTORS, + default=Settings.CONNECTORS[0], + help="which will be used", + ) + return p - with TemporaryDirectory() as tmp_dir: - for connector in Settings.CONNECTORS: - report = Path(tmp_dir) / f"test-report{connector}.json" - with odbcconfig() as (config, env): - uri = "".join( - [ - "exa+{connector}:", - "//sys:exasol@localhost:{db_port}", - "/TEST?CONNECTIONLCALL=en_US.UTF-8&DRIVER=EXAODBC&SSLCertificate=SSL_VERIFY_NONE", - ] - ).format(connector=connector, db_port=Settings.DB_PORT) - session.run( - "pytest", - "--dropfirst", - "--dburi", - uri, - "--json-report", - f"--json-report-file={report}", - external=True, - env=env, - ) + with odbcconfig(Settings.ODBC_DRIVER) as (config, env): + args = parser().parse_args(session.posargs) + connector = args.connector + session.run( + "pytest", + "--dropfirst", + "--db", + f"exasol-{connector}", + f"{PROJECT_ROOT / 'test' / 'integration' / 'sqlalchemy'}", + external=True, + env=env, + ) - session.run( - "python", - f"{SCRIPTS / 'report.py'}", - "-f", - "csv", - "--output", - f"skipped-tests-{connector}.csv", - f"{connector}", - f"{report}", - ) +@nox.session(name="exasol-tests", python=False) +def exasol_tests(session: Session) -> None: + """Run the integration tests with a specific connector. For more details append '-- -h'""" -@nox.session(name="check-links", python=False) -def check_links(session: Session) -> None: - """Checks weather or not all links in the documentation can be accessed""" - errors = [] - for path, url in _urls(_documentation(PROJECT_ROOT)): - status, details = _check(url) - if status != 200: - errors.append((path, url, status, details)) + def parser() -> ArgumentParser: + p = ArgumentParser( + usage="nox -s exasol-tests -- [-h] [--connector]", + formatter_class=argparse.ArgumentDefaultsHelpFormatter, + ) + p.add_argument( + "--connector", + choices=Settings.CONNECTORS, + default=Settings.CONNECTORS[0], + help="which will be used", + ) + return p - if errors: - session.error( - "\n" - + "\n".join(f"Url: {e[1]}, File: {e[0]}, Error: {e[3]}" for e in errors) + with odbcconfig(Settings.ODBC_DRIVER) as (config, env): + args = parser().parse_args(session.posargs) + connector = args.connector + session.run( + "pytest", + "--dropfirst", + "--db", + f"exasol-{connector}", + f"{PROJECT_ROOT / 'test' / 'integration' / 'exasol'}", + external=True, + env=env, ) -@nox.session(name="list-links", python=False) -def list_links(session: Session) -> None: - """List all links within the documentation""" - for path, url in _urls(_documentation(PROJECT_ROOT)): - session.log(f"Url: {url}, File: {path}") +@nox.session(name="integration-tests", python=False) +def integration_tests(session: Session) -> None: + """Run integration tests with a specific configuration. 
For more details append '-- -h'""" + + def parser() -> ArgumentParser: + p = ArgumentParser( + usage="nox -s integration-tests -- [-h] [--connector] [--db-version]", + formatter_class=argparse.ArgumentDefaultsHelpFormatter, + ) + p.add_argument( + "--connector", + choices=Settings.CONNECTORS, + default=Settings.CONNECTORS[0], + help="which will be used", + ) + p.add_argument( + "--db-version", + choices=Settings.DB_VERSIONS, + default=Settings.DB_VERSIONS[0], + help="which will be used", + ) + return p + + args = parser().parse_args(session.posargs) + session.notify( + find_session_runner(session, "db-start"), + posargs=["--db-version", f"{args.db_version}"], + ) + session.notify( + find_session_runner(session, "sqla-tests"), + posargs=["--connector", f"{args.connector}"], + ) + session.notify( + find_session_runner(session, "exasol-tests"), + posargs=["--connector", f"{args.connector}"], + ) + session.notify(find_session_runner(session, "db-stop")) + + +@nox.session(python=False, name="clean-docs") +def clean(session: Session) -> None: + """Remove all documentation artifacts""" + if DOC_BUILD.exists(): + rmtree(DOC_BUILD.resolve()) + session.log(f"Removed {DOC_BUILD}") + + +@nox.session(python=False, name="build-docs") +def build(session: Session) -> None: + """Build the documentation""" + session.run( + "sphinx-build", "-b", "html", "-W", f"{DOC}", f"{DOC_BUILD}", external=True + ) + + +@nox.session(python=False, name="open-docs") +def open_docs(session: Session) -> None: + """Open the documentation in the browser""" + index_page = DOC_BUILD / "index.html" + if not index_page.exists(): + session.error( + f"File {index_page} does not exist. " "Please run `nox -s build-docs` first" + ) + + webbrowser.open_new_tab(index_page.resolve().as_uri()) @nox.session(python=False) def release(session: Session) -> None: + """Release a sqlalchemy-exasol package.
For more details append '-- -h'""" + def create_parser() -> ArgumentParser: p = ArgumentParser( "Release a pypi package", @@ -423,29 +375,119 @@ def create_parser() -> ArgumentParser: ) -@nox.session(python=False, name="clean-docs") -def clean(session: Session) -> None: - """Remove all documentation artifacts""" - if DOC_BUILD.exists(): - rmtree(DOC_BUILD.resolve()) - session.log(f"Removed {DOC_BUILD}") +@nox.session(python=False) +def pyupgrade(session: Session) -> None: + """Run pyupgrade against the code base""" + files = [f"{path}" for path in _python_files(PROJECT_ROOT)] + session.run("poetry", "run", "python", "-m", "pyupgrade", "--py38-plus", *files) -@nox.session(python=False, name="build-docs") -def build(session: Session) -> None: - """Build the documentation""" +@nox.session(name="code-format", python=False) +def code_format(session: Session) -> None: + """Run the code formatter against the codebase""" session.run( - "sphinx-build", "-b", "html", "-W", f"{DOC}", f"{DOC_BUILD}", external=True + "poetry", + "run", + "python", + "-m", + "black", + "--check", + "--diff", + "--color", + f"{PROJECT_ROOT}", ) -@nox.session(python=False, name="open-docs") -def open_docs(session: Session) -> None: - """Open the documentation in the browser""" - index_page = DOC_BUILD / "index.html" - if not index_page.exists(): +@nox.session(python=False) +def isort(session: Session) -> None: + """Run isort against the codebase""" + session.run( + "poetry", "run", "python", "-m", "isort", "-v", "--check", f"{PROJECT_ROOT}" + ) + + +@nox.session(python=False) +def lint(session: Session) -> None: + """Run the linter against the codebase""" + session.run( + "poetry", + "run", + "python", + "-m", + "pylint", + f'{PROJECT_ROOT / "scripts"}', + f'{PROJECT_ROOT / "sqlalchemy_exasol"}', + ) + + +@nox.session(name="type-check", python=False) +def type_check(session: Session) -> None: + """Run the type checker against the codebase""" + session.run( + "poetry", + "run", + "mypy", + "--strict", + "--show-error-codes", + "--pretty", + "--show-column-numbers", + "--show-error-context", + "--scripts-are-modules", + ) + + +@nox.session(name="report-skipped", python=False) +def report_skipped(session: Session) -> None: + """ + Runs all tests for all supported connectors and creates a CSV report of skipped tests for each connector. + + Attention: This task expects a running test database (db-start). + """ + with TemporaryDirectory() as tmp_dir: + for connector in Settings.CONNECTORS: + report = Path(tmp_dir) / f"test-report{connector}.json" + with odbcconfig(Settings.ODBC_DRIVER) as (config, env): + session.run( + "pytest", + "--dropfirst", + "--db", + f"exasol-{connector}", + f"{PROJECT_ROOT / 'test' / 'integration' / 'sqlalchemy'}", + "--json-report", + f"--json-report-file={report}", + external=True, + env=env, + ) + session.run( + "python", + f"{SCRIPTS / 'report.py'}", + "-f", + "csv", + "--output", + f"skipped-tests-{connector}.csv", + f"{connector}", + f"{report}", + ) + + +@nox.session(name="check-links", python=False) +def check_links(session: Session) -> None: + """Checks whether or not all links in the documentation can be accessed""" + errors = [] + for path, url in _urls(_documentation(PROJECT_ROOT)): + status, details = _check(url) + if status != 200: + errors.append((path, url, status, details)) + + if errors: session.error( - f"File {index_page} does not exist."
"Please run `nox -s build-docs` first" + "\n" + + "\n".join(f"Url: {e[1]}, File: {e[0]}, Error: {e[3]}" for e in errors) ) - webbrowser.open_new_tab(index_page.resolve().as_uri()) + +@nox.session(name="list-links", python=False) +def list_links(session: Session) -> None: + """List all links within the documentation""" + for path, url in _urls(_documentation(PROJECT_ROOT)): + session.log(f"Url: {url}, File: {path}") diff --git a/poetry.lock b/poetry.lock index fda55d08..de32be91 100644 --- a/poetry.lock +++ b/poetry.lock @@ -19,16 +19,19 @@ test = ["wheel", "pexpect", "flake8", "coverage"] [[package]] name = "astroid" -version = "2.11.7" +version = "2.12.10" description = "An abstract syntax tree for Python with inference support." category = "dev" optional = false -python-versions = ">=3.6.2" +python-versions = ">=3.7.2" [package.dependencies] lazy-object-proxy = ">=1.4.0" typing-extensions = {version = ">=3.10", markers = "python_version < \"3.10\""} -wrapt = ">=1.11,<2" +wrapt = [ + {version = ">=1.11,<2", markers = "python_version < \"3.11\""}, + {version = ">=1.14,<2", markers = "python_version >= \"3.11\""}, +] [[package]] name = "atomicwrites" @@ -80,7 +83,7 @@ lxml = ["lxml"] [[package]] name = "black" -version = "22.6.0" +version = "22.8.0" description = "The uncompromising code formatter." category = "dev" optional = false @@ -102,7 +105,7 @@ uvloop = ["uvloop (>=0.15.2)"] [[package]] name = "certifi" -version = "2022.6.15" +version = "2022.9.24" description = "Python package for providing Mozilla's CA Bundle." category = "dev" optional = false @@ -118,7 +121,7 @@ python-versions = ">=3.6.1" [[package]] name = "charset-normalizer" -version = "2.1.0" +version = "2.1.1" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." category = "dev" optional = false @@ -159,7 +162,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [[package]] name = "colorlog" -version = "6.6.0" +version = "6.7.0" description = "Add colours to the output of Python's logging module." category = "dev" optional = false @@ -173,7 +176,7 @@ development = ["black", "flake8", "mypy", "pytest", "types-colorama"] [[package]] name = "coverage" -version = "6.4.3" +version = "6.5.0" description = "Code coverage measurement for Python" category = "dev" optional = false @@ -198,7 +201,7 @@ graph = ["objgraph (>=1.7.2)"] [[package]] name = "distlib" -version = "0.3.5" +version = "0.3.6" description = "Distribution utilities" category = "dev" optional = false @@ -226,7 +229,7 @@ testing = ["covdefaults (>=2.2)", "coverage (>=6.4.2)", "pytest (>=7.1.2)", "pyt [[package]] name = "furo" -version = "2022.6.21" +version = "2022.9.29" description = "A clean customisable Sphinx documentation theme." 
category = "dev" optional = false @@ -234,13 +237,24 @@ python-versions = ">=3.7" [package.dependencies] beautifulsoup4 = "*" -pygments = "*" +pygments = ">=2.7" sphinx = ">=4.0,<6.0" sphinx-basic-ng = "*" +[[package]] +name = "greenlet" +version = "1.1.3" +description = "Lightweight in-process concurrent programming" +category = "main" +optional = false +python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*" + +[package.extras] +docs = ["sphinx"] + [[package]] name = "identify" -version = "2.5.3" +version = "2.5.6" description = "File identification library for Python" category = "dev" optional = false @@ -251,7 +265,7 @@ license = ["ukkonen"] [[package]] name = "idna" -version = "3.3" +version = "3.4" description = "Internationalized Domain Names in Applications (IDNA)" category = "dev" optional = false @@ -267,7 +281,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "importlib-metadata" -version = "4.12.0" +version = "5.0.0" description = "Read metadata from Python packages" category = "dev" optional = false @@ -277,9 +291,9 @@ python-versions = ">=3.7" zipp = ">=0.5" [package.extras] -docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)"] +docs = ["sphinx (>=3.5)", "jaraco.packaging (>=9)", "rst.linker (>=1.9)", "furo", "jaraco.tidelift (>=1.4)"] perf = ["ipython"] -testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.3)", "packaging", "pyfakefs", "flufl.flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)", "importlib-resources (>=1.3)"] +testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "flake8 (<5)", "pytest-cov", "pytest-enabler (>=1.3)", "packaging", "pyfakefs", "flufl.flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)", "importlib-resources (>=1.3)"] [[package]] name = "iniconfig" @@ -343,11 +357,11 @@ python-versions = ">=3.6" [[package]] name = "mypy" -version = "0.971" +version = "0.982" description = "Optional static typing for Python" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.dependencies] mypy-extensions = ">=0.4.3" @@ -391,7 +405,7 @@ py = ">=1.4,<2.0.0" virtualenv = ">=14" [package.extras] -tox_to_nox = ["jinja2", "tox"] +tox_to_nox = ["tox", "jinja2"] [[package]] name = "packaging" @@ -406,11 +420,11 @@ pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" [[package]] name = "pathspec" -version = "0.9.0" +version = "0.10.1" description = "Utility library for gitignore style pattern matching of file paths." category = "dev" optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" +python-versions = ">=3.7" [[package]] name = "platformdirs" @@ -462,22 +476,25 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [[package]] name = "pygments" -version = "2.12.0" +version = "2.13.0" description = "Pygments is a syntax highlighting package written in Python." 
category = "dev" optional = false python-versions = ">=3.6" +[package.extras] +plugins = ["importlib-metadata"] + [[package]] name = "pylint" -version = "2.14.5" +version = "2.15.3" description = "python code static checker" category = "dev" optional = false python-versions = ">=3.7.2" [package.dependencies] -astroid = ">=2.11.6,<=2.12.0-dev0" +astroid = ">=2.12.10,<=2.14.0-dev0" colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} dill = ">=0.2" isort = ">=4.2.5,<6" @@ -533,7 +550,7 @@ testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xm [[package]] name = "pytest-cov" -version = "3.0.0" +version = "4.0.0" description = "Pytest plugin for measuring coverage." category = "dev" optional = false @@ -571,7 +588,7 @@ pytest = ">=3.0.0,<8.0.0" [[package]] name = "pytz" -version = "2022.2" +version = "2022.4" description = "World timezone definitions, modern and historical" category = "dev" optional = false @@ -579,7 +596,7 @@ python-versions = "*" [[package]] name = "pyupgrade" -version = "2.37.3" +version = "3.0.0" description = "A tool to automatically upgrade syntax for newer versions." category = "dev" optional = false @@ -650,7 +667,7 @@ python-versions = ">=3.6" [[package]] name = "sphinx" -version = "5.1.1" +version = "5.2.3" description = "Python documentation generator" category = "dev" optional = false @@ -658,16 +675,16 @@ python-versions = ">=3.6" [package.dependencies] alabaster = ">=0.7,<0.8" -babel = ">=1.3" -colorama = {version = ">=0.3.5", markers = "sys_platform == \"win32\""} +babel = ">=2.9" +colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} docutils = ">=0.14,<0.20" -imagesize = "*" -importlib-metadata = {version = ">=4.4", markers = "python_version < \"3.10\""} -Jinja2 = ">=2.3" -packaging = "*" -Pygments = ">=2.0" +imagesize = ">=1.3" +importlib-metadata = {version = ">=4.8", markers = "python_version < \"3.10\""} +Jinja2 = ">=3.0" +packaging = ">=21.0" +Pygments = ">=2.12" requests = ">=2.5.0" -snowballstemmer = ">=1.1" +snowballstemmer = ">=2.0" sphinxcontrib-applehelp = "*" sphinxcontrib-devhelp = "*" sphinxcontrib-htmlhelp = ">=2.0.0" @@ -677,19 +694,19 @@ sphinxcontrib-serializinghtml = ">=1.1.5" [package.extras] docs = ["sphinxcontrib-websupport"] -lint = ["flake8 (>=3.5.0)", "flake8-comprehensions", "flake8-bugbear", "isort", "mypy (>=0.971)", "sphinx-lint", "docutils-stubs", "types-typed-ast", "types-requests"] -test = ["pytest (>=4.6)", "html5lib", "cython", "typed-ast"] +lint = ["flake8 (>=3.5.0)", "flake8-comprehensions", "flake8-bugbear", "flake8-simplify", "isort", "mypy (>=0.981)", "sphinx-lint", "docutils-stubs", "types-typed-ast", "types-requests"] +test = ["pytest (>=4.6)", "html5lib", "typed-ast", "cython"] [[package]] name = "sphinx-basic-ng" -version = "0.0.1a12" +version = "1.0.0b1" description = "A modern skeleton for Sphinx themes." 
category = "dev" optional = false python-versions = ">=3.7" [package.dependencies] -sphinx = ">=4.0,<6.0" +sphinx = ">=4.0" [package.extras] docs = ["furo", "myst-parser", "sphinx-copybutton", "sphinx-inline-tabs", "ipython"] @@ -718,8 +735,8 @@ optional = false python-versions = ">=3.5" [package.extras] -lint = ["flake8", "mypy", "docutils-stubs"] test = ["pytest"] +lint = ["docutils-stubs", "mypy", "flake8"] [[package]] name = "sphinxcontrib-devhelp" @@ -730,8 +747,8 @@ optional = false python-versions = ">=3.5" [package.extras] -lint = ["flake8", "mypy", "docutils-stubs"] test = ["pytest"] +lint = ["docutils-stubs", "mypy", "flake8"] [[package]] name = "sphinxcontrib-htmlhelp" @@ -742,8 +759,8 @@ optional = false python-versions = ">=3.6" [package.extras] -lint = ["flake8", "mypy", "docutils-stubs"] -test = ["pytest", "html5lib"] +test = ["html5lib", "pytest"] +lint = ["docutils-stubs", "mypy", "flake8"] [[package]] name = "sphinxcontrib-jsmath" @@ -754,7 +771,7 @@ optional = false python-versions = ">=3.5" [package.extras] -test = ["pytest", "flake8", "mypy"] +test = ["mypy", "flake8", "pytest"] [[package]] name = "sphinxcontrib-qthelp" @@ -765,8 +782,8 @@ optional = false python-versions = ">=3.5" [package.extras] -lint = ["flake8", "mypy", "docutils-stubs"] test = ["pytest"] +lint = ["docutils-stubs", "mypy", "flake8"] [[package]] name = "sphinxcontrib-serializinghtml" @@ -777,36 +794,48 @@ optional = false python-versions = ">=3.5" [package.extras] -lint = ["flake8", "mypy", "docutils-stubs"] test = ["pytest"] +lint = ["docutils-stubs", "mypy", "flake8"] [[package]] name = "sqlalchemy" -version = "1.3.24" +version = "1.4.41" description = "Database Abstraction Library" category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" + +[package.dependencies] +greenlet = {version = "!=0.4.17", markers = "python_version >= \"3\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"} [package.extras] +aiomysql = ["greenlet (!=0.4.17)", "aiomysql"] +aiosqlite = ["typing_extensions (!=3.10.0.1)", "greenlet (!=0.4.17)", "aiosqlite"] +asyncio = ["greenlet (!=0.4.17)"] +asyncmy = ["greenlet (!=0.4.17)", "asyncmy (>=0.2.3,!=0.2.4)"] +mariadb_connector = ["mariadb (>=1.0.1,!=1.1.2)"] mssql = ["pyodbc"] mssql_pymssql = ["pymssql"] mssql_pyodbc = ["pyodbc"] -mysql = ["mysqlclient"] -oracle = ["cx-oracle"] -postgresql = ["psycopg2"] -postgresql_pg8000 = ["pg8000 (<1.16.6)"] +mypy = ["sqlalchemy2-stubs", "mypy (>=0.910)"] +mysql = ["mysqlclient (>=1.4.0,<2)", "mysqlclient (>=1.4.0)"] +mysql_connector = ["mysql-connector-python"] +oracle = ["cx_oracle (>=7,<8)", "cx_oracle (>=7)"] +postgresql = ["psycopg2 (>=2.7)"] +postgresql_asyncpg = ["greenlet (!=0.4.17)", "asyncpg"] +postgresql_pg8000 = ["pg8000 (>=1.16.6,!=1.29.0)"] postgresql_psycopg2binary = ["psycopg2-binary"] postgresql_psycopg2cffi = ["psycopg2cffi"] pymysql = ["pymysql (<1)", "pymysql"] +sqlcipher = ["sqlcipher3-binary"] [[package]] name = "tokenize-rt" -version = "4.2.1" +version = "5.0.0" description = "A wrapper around the stdlib `tokenize` which roundtrips." 
category = "dev" optional = false -python-versions = ">=3.6.1" +python-versions = ">=3.7" [[package]] name = "toml" @@ -826,7 +855,7 @@ python-versions = ">=3.7" [[package]] name = "tomlkit" -version = "0.11.3" +version = "0.11.5" description = "Style preserving TOML library" category = "dev" optional = false @@ -854,7 +883,7 @@ python-versions = ">=3.7" [[package]] name = "urllib3" -version = "1.26.11" +version = "1.26.12" description = "HTTP library with thread-safe connection pooling, file post, and more." category = "dev" optional = false @@ -862,7 +891,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, [package.extras] brotli = ["brotlicffi (>=0.8.0)", "brotli (>=1.0.9)", "brotlipy (>=0.6.0)"] -secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"] +secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "urllib3-secure-extra", "ipaddress"] socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] @@ -886,7 +915,7 @@ python-versions = "*" [[package]] name = "virtualenv" -version = "20.16.3" +version = "20.16.5" description = "Virtual Python Environment builder" category = "dev" optional = false @@ -927,25 +956,22 @@ turbodbc = ["turbodbc"] [metadata] lock-version = "1.1" python-versions = ">=3.8,<4.0" -content-hash = "7071474c13a81d609b6ed047e6cd242858d551105d3b50e00d3129e368016cd5" +content-hash = "4f3cb083b073c21a92a78851472ef3c62db4655c985f393e23b90f51fc147391" [metadata.files] -alabaster = [ - {file = "alabaster-0.7.12-py2.py3-none-any.whl", hash = "sha256:446438bdcca0e05bd45ea2de1668c1d9b032e1a9154c2c259092d77031ddd359"}, - {file = "alabaster-0.7.12.tar.gz", hash = "sha256:a661d72d58e6ea8a57f7a86e37d86716863ee5e92788398526d58b26a4e4dc02"}, -] +alabaster = [] argcomplete = [] astroid = [] atomicwrites = [] attrs = [] babel = [] -beautifulsoup4 = [ - {file = "beautifulsoup4-4.11.1-py3-none-any.whl", hash = "sha256:58d5c3d29f5a36ffeb94f02f0d786cd53014cf9b3b3951d42e0080d8a9498d30"}, - {file = "beautifulsoup4-4.11.1.tar.gz", hash = "sha256:ad9aa55b65ef2808eb405f46cf74df7fcb7044d5cbc26487f96eb2ef2e436693"}, -] +beautifulsoup4 = [] black = [] certifi = [] -cfgv = [] +cfgv = [ + {file = "cfgv-3.3.1-py2.py3-none-any.whl", hash = "sha256:c6a0883f3917a037485059700b9e75da2464e6c27051014ad85ba6aaa5884426"}, + {file = "cfgv-3.3.1.tar.gz", hash = "sha256:f5a830efb9ce7a445376bb66ec94c638a9787422f96264c98edc6bdeed8ab736"}, +] charset-normalizer = [] click = [] click-log = [] @@ -953,35 +979,24 @@ colorama = [ {file = "colorama-0.4.5-py2.py3-none-any.whl", hash = "sha256:854bf444933e37f5824ae7bfc1e98d5bce2ebe4160d46b5edf346a89358e99da"}, {file = "colorama-0.4.5.tar.gz", hash = "sha256:e6c6b4334fc50988a639d9b98aa429a0b57da6e17b9a44f0451f930b6967b7a4"}, ] -colorlog = [ - {file = "colorlog-6.6.0-py2.py3-none-any.whl", hash = "sha256:351c51e866c86c3217f08e4b067a7974a678be78f07f85fc2d55b8babde6d94e"}, - {file = "colorlog-6.6.0.tar.gz", hash = "sha256:344f73204009e4c83c5b6beb00b3c45dc70fcdae3c80db919e0a4171d006fde8"}, -] +colorlog = [] coverage = [] dill = [] distlib = [] docutils = [] filelock = [] furo = [] +greenlet = [] identify = [] -idna = [ - {file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"}, - {file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"}, -] +idna = [] imagesize = [] importlib-metadata = [] iniconfig = [ {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = 
"sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, ] -isort = [ - {file = "isort-5.10.1-py3-none-any.whl", hash = "sha256:6f62d78e2f89b4500b080fe3a81690850cd254227f27f75c3a0c491a1f351ba7"}, - {file = "isort-5.10.1.tar.gz", hash = "sha256:e8443a5e7a020e9d7f97f1d7d9cd17c88bcb3bc7e218bf9cf5095fe550be2951"}, -] -jinja2 = [ - {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, - {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, -] +isort = [] +jinja2 = [] lazy-object-proxy = [] markupsafe = [ {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812"}, @@ -1031,16 +1046,16 @@ mypy-extensions = [ {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, ] -nodeenv = [] +nodeenv = [ + {file = "nodeenv-1.7.0-py2.py3-none-any.whl", hash = "sha256:27083a7b96a25f2f5e1d8cb4b6317ee8aeda3bdd121394e5ac54e498028a042e"}, + {file = "nodeenv-1.7.0.tar.gz", hash = "sha256:e0e7f7dfb85fc5394c6fe1e8fa98131a2473e04311a45afb6508f7cf1836fa2b"}, +] nox = [] packaging = [ {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, ] -pathspec = [ - {file = "pathspec-0.9.0-py2.py3-none-any.whl", hash = "sha256:7d15c4ddb0b5c802d161efc417ec1a2558ea2653c2e8ad9c19098201dc1c993a"}, - {file = "pathspec-0.9.0.tar.gz", hash = "sha256:e564499435a2673d586f6b2130bb5b95f04a3ba06f81b8f895b651a3c76aabb1"}, -] +pathspec = [] platformdirs = [ {file = "platformdirs-2.5.2-py3-none-any.whl", hash = "sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788"}, {file = "platformdirs-2.5.2.tar.gz", hash = "sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19"}, @@ -1054,10 +1069,7 @@ py = [ {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, ] -pygments = [ - {file = "Pygments-2.12.0-py3-none-any.whl", hash = "sha256:dc9c10fb40944260f6ed4c688ece0cd2048414940f1cea51b8b226318411c519"}, - {file = "Pygments-2.12.0.tar.gz", hash = "sha256:5eb116118f9612ff1ee89ac96437bb6b49e8f04d8a13b514ba26f620208e26eb"}, -] +pygments = [] pylint = [] pyodbc = [] pyparsing = [ @@ -1066,7 +1078,10 @@ pyparsing = [ ] pytest = [] pytest-cov = [] -pytest-json-report = [] +pytest-json-report = [ + {file = "pytest-json-report-1.5.0.tar.gz", hash = "sha256:2dde3c647851a19b5f3700729e8310a6e66efb2077d674f27ddea3d34dc615de"}, + {file = "pytest_json_report-1.5.0-py3-none-any.whl", hash = "sha256:9897b68c910b12a2e48dd849f9a284b2c79a732a8a9cb398452ddd23d3c8c325"}, +] pytest-metadata = [] pytz = [] pyupgrade = [] @@ -1107,41 +1122,17 @@ pyyaml = [ ] requests = [] scriv = [] -snowballstemmer = [ - {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = 
"sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, - {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, -] -soupsieve = [ - {file = "soupsieve-2.3.2.post1-py3-none-any.whl", hash = "sha256:3b2503d3c7084a42b1ebd08116e5f81aadfaea95863628c80a3b774a11b7c759"}, - {file = "soupsieve-2.3.2.post1.tar.gz", hash = "sha256:fc53893b3da2c33de295667a0e19f078c14bf86544af307354de5fcf12a3f30d"}, -] +snowballstemmer = [] +soupsieve = [] sphinx = [] sphinx-basic-ng = [] sphinx-copybutton = [] -sphinxcontrib-applehelp = [ - {file = "sphinxcontrib-applehelp-1.0.2.tar.gz", hash = "sha256:a072735ec80e7675e3f432fcae8610ecf509c5f1869d17e2eecff44389cdbc58"}, - {file = "sphinxcontrib_applehelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:806111e5e962be97c29ec4c1e7fe277bfd19e9652fb1a4392105b43e01af885a"}, -] -sphinxcontrib-devhelp = [ - {file = "sphinxcontrib-devhelp-1.0.2.tar.gz", hash = "sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4"}, - {file = "sphinxcontrib_devhelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e"}, -] -sphinxcontrib-htmlhelp = [ - {file = "sphinxcontrib-htmlhelp-2.0.0.tar.gz", hash = "sha256:f5f8bb2d0d629f398bf47d0d69c07bc13b65f75a81ad9e2f71a63d4b7a2f6db2"}, - {file = "sphinxcontrib_htmlhelp-2.0.0-py2.py3-none-any.whl", hash = "sha256:d412243dfb797ae3ec2b59eca0e52dac12e75a241bf0e4eb861e450d06c6ed07"}, -] -sphinxcontrib-jsmath = [ - {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"}, - {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"}, -] -sphinxcontrib-qthelp = [ - {file = "sphinxcontrib-qthelp-1.0.3.tar.gz", hash = "sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72"}, - {file = "sphinxcontrib_qthelp-1.0.3-py2.py3-none-any.whl", hash = "sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6"}, -] -sphinxcontrib-serializinghtml = [ - {file = "sphinxcontrib-serializinghtml-1.1.5.tar.gz", hash = "sha256:aa5f6de5dfdf809ef505c4895e51ef5c9eac17d0f287933eb49ec495280b6952"}, - {file = "sphinxcontrib_serializinghtml-1.1.5-py2.py3-none-any.whl", hash = "sha256:352a9a00ae864471d3a7ead8d7d79f5fc0b57e8b3f95e9867eb9eb28999b92fd"}, -] +sphinxcontrib-applehelp = [] +sphinxcontrib-devhelp = [] +sphinxcontrib-htmlhelp = [] +sphinxcontrib-jsmath = [] +sphinxcontrib-qthelp = [] +sphinxcontrib-serializinghtml = [] sqlalchemy = [] tokenize-rt = [] toml = [ @@ -1157,7 +1148,74 @@ turbodbc = [] typing-extensions = [] urllib3 = [] urlscan = [] -urwid = [] +urwid = [ + {file = "urwid-2.1.2.tar.gz", hash = "sha256:588bee9c1cb208d0906a9f73c613d2bd32c3ed3702012f51efe318a3f2127eae"}, +] virtualenv = [] -wrapt = [] +wrapt = [ + {file = "wrapt-1.14.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:1b376b3f4896e7930f1f772ac4b064ac12598d1c38d04907e696cc4d794b43d3"}, + {file = "wrapt-1.14.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:903500616422a40a98a5a3c4ff4ed9d0066f3b4c951fa286018ecdf0750194ef"}, + {file = "wrapt-1.14.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:5a9a0d155deafd9448baff28c08e150d9b24ff010e899311ddd63c45c2445e28"}, + {file = "wrapt-1.14.1-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:ddaea91abf8b0d13443f6dac52e89051a5063c7d014710dcb4d4abb2ff811a59"}, + {file = "wrapt-1.14.1-cp27-cp27m-manylinux2010_x86_64.whl", 
hash = "sha256:36f582d0c6bc99d5f39cd3ac2a9062e57f3cf606ade29a0a0d6b323462f4dd87"}, + {file = "wrapt-1.14.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:7ef58fb89674095bfc57c4069e95d7a31cfdc0939e2a579882ac7d55aadfd2a1"}, + {file = "wrapt-1.14.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:e2f83e18fe2f4c9e7db597e988f72712c0c3676d337d8b101f6758107c42425b"}, + {file = "wrapt-1.14.1-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:ee2b1b1769f6707a8a445162ea16dddf74285c3964f605877a20e38545c3c462"}, + {file = "wrapt-1.14.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:833b58d5d0b7e5b9832869f039203389ac7cbf01765639c7309fd50ef619e0b1"}, + {file = "wrapt-1.14.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:80bb5c256f1415f747011dc3604b59bc1f91c6e7150bd7db03b19170ee06b320"}, + {file = "wrapt-1.14.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:07f7a7d0f388028b2df1d916e94bbb40624c59b48ecc6cbc232546706fac74c2"}, + {file = "wrapt-1.14.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:02b41b633c6261feff8ddd8d11c711df6842aba629fdd3da10249a53211a72c4"}, + {file = "wrapt-1.14.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2fe803deacd09a233e4762a1adcea5db5d31e6be577a43352936179d14d90069"}, + {file = "wrapt-1.14.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:257fd78c513e0fb5cdbe058c27a0624c9884e735bbd131935fd49e9fe719d310"}, + {file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4fcc4649dc762cddacd193e6b55bc02edca674067f5f98166d7713b193932b7f"}, + {file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:11871514607b15cfeb87c547a49bca19fde402f32e2b1c24a632506c0a756656"}, + {file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8ad85f7f4e20964db4daadcab70b47ab05c7c1cf2a7c1e51087bfaa83831854c"}, + {file = "wrapt-1.14.1-cp310-cp310-win32.whl", hash = "sha256:a9a52172be0b5aae932bef82a79ec0a0ce87288c7d132946d645eba03f0ad8a8"}, + {file = "wrapt-1.14.1-cp310-cp310-win_amd64.whl", hash = "sha256:6d323e1554b3d22cfc03cd3243b5bb815a51f5249fdcbb86fda4bf62bab9e164"}, + {file = "wrapt-1.14.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:43ca3bbbe97af00f49efb06e352eae40434ca9d915906f77def219b88e85d907"}, + {file = "wrapt-1.14.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:6b1a564e6cb69922c7fe3a678b9f9a3c54e72b469875aa8018f18b4d1dd1adf3"}, + {file = "wrapt-1.14.1-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:00b6d4ea20a906c0ca56d84f93065b398ab74b927a7a3dbd470f6fc503f95dc3"}, + {file = "wrapt-1.14.1-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:a85d2b46be66a71bedde836d9e41859879cc54a2a04fad1191eb50c2066f6e9d"}, + {file = "wrapt-1.14.1-cp35-cp35m-win32.whl", hash = "sha256:dbcda74c67263139358f4d188ae5faae95c30929281bc6866d00573783c422b7"}, + {file = "wrapt-1.14.1-cp35-cp35m-win_amd64.whl", hash = "sha256:b21bb4c09ffabfa0e85e3a6b623e19b80e7acd709b9f91452b8297ace2a8ab00"}, + {file = "wrapt-1.14.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:9e0fd32e0148dd5dea6af5fee42beb949098564cc23211a88d799e434255a1f4"}, + {file = "wrapt-1.14.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9736af4641846491aedb3c3f56b9bc5568d92b0692303b5a305301a95dfd38b1"}, + {file = "wrapt-1.14.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:5b02d65b9ccf0ef6c34cba6cf5bf2aab1bb2f49c6090bafeecc9cd81ad4ea1c1"}, + {file = "wrapt-1.14.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21ac0156c4b089b330b7666db40feee30a5d52634cc4560e1905d6529a3897ff"}, + {file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:9f3e6f9e05148ff90002b884fbc2a86bd303ae847e472f44ecc06c2cd2fcdb2d"}, + {file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:6e743de5e9c3d1b7185870f480587b75b1cb604832e380d64f9504a0535912d1"}, + {file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:d79d7d5dc8a32b7093e81e97dad755127ff77bcc899e845f41bf71747af0c569"}, + {file = "wrapt-1.14.1-cp36-cp36m-win32.whl", hash = "sha256:81b19725065dcb43df02b37e03278c011a09e49757287dca60c5aecdd5a0b8ed"}, + {file = "wrapt-1.14.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b014c23646a467558be7da3d6b9fa409b2c567d2110599b7cf9a0c5992b3b471"}, + {file = "wrapt-1.14.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:88bd7b6bd70a5b6803c1abf6bca012f7ed963e58c68d76ee20b9d751c74a3248"}, + {file = "wrapt-1.14.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5901a312f4d14c59918c221323068fad0540e34324925c8475263841dbdfe68"}, + {file = "wrapt-1.14.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d77c85fedff92cf788face9bfa3ebaa364448ebb1d765302e9af11bf449ca36d"}, + {file = "wrapt-1.14.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d649d616e5c6a678b26d15ece345354f7c2286acd6db868e65fcc5ff7c24a77"}, + {file = "wrapt-1.14.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7d2872609603cb35ca513d7404a94d6d608fc13211563571117046c9d2bcc3d7"}, + {file = "wrapt-1.14.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:ee6acae74a2b91865910eef5e7de37dc6895ad96fa23603d1d27ea69df545015"}, + {file = "wrapt-1.14.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2b39d38039a1fdad98c87279b48bc5dce2c0ca0d73483b12cb72aa9609278e8a"}, + {file = "wrapt-1.14.1-cp37-cp37m-win32.whl", hash = "sha256:60db23fa423575eeb65ea430cee741acb7c26a1365d103f7b0f6ec412b893853"}, + {file = "wrapt-1.14.1-cp37-cp37m-win_amd64.whl", hash = "sha256:709fe01086a55cf79d20f741f39325018f4df051ef39fe921b1ebe780a66184c"}, + {file = "wrapt-1.14.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8c0ce1e99116d5ab21355d8ebe53d9460366704ea38ae4d9f6933188f327b456"}, + {file = "wrapt-1.14.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e3fb1677c720409d5f671e39bac6c9e0e422584e5f518bfd50aa4cbbea02433f"}, + {file = "wrapt-1.14.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:642c2e7a804fcf18c222e1060df25fc210b9c58db7c91416fb055897fc27e8cc"}, + {file = "wrapt-1.14.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b7c050ae976e286906dd3f26009e117eb000fb2cf3533398c5ad9ccc86867b1"}, + {file = "wrapt-1.14.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef3f72c9666bba2bab70d2a8b79f2c6d2c1a42a7f7e2b0ec83bb2f9e383950af"}, + {file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:01c205616a89d09827986bc4e859bcabd64f5a0662a7fe95e0d359424e0e071b"}, + {file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5a0f54ce2c092aaf439813735584b9537cad479575a09892b8352fea5e988dc0"}, + {file = 
"wrapt-1.14.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2cf71233a0ed05ccdabe209c606fe0bac7379fdcf687f39b944420d2a09fdb57"}, + {file = "wrapt-1.14.1-cp38-cp38-win32.whl", hash = "sha256:aa31fdcc33fef9eb2552cbcbfee7773d5a6792c137b359e82879c101e98584c5"}, + {file = "wrapt-1.14.1-cp38-cp38-win_amd64.whl", hash = "sha256:d1967f46ea8f2db647c786e78d8cc7e4313dbd1b0aca360592d8027b8508e24d"}, + {file = "wrapt-1.14.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3232822c7d98d23895ccc443bbdf57c7412c5a65996c30442ebe6ed3df335383"}, + {file = "wrapt-1.14.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:988635d122aaf2bdcef9e795435662bcd65b02f4f4c1ae37fbee7401c440b3a7"}, + {file = "wrapt-1.14.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cca3c2cdadb362116235fdbd411735de4328c61425b0aa9f872fd76d02c4e86"}, + {file = "wrapt-1.14.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d52a25136894c63de15a35bc0bdc5adb4b0e173b9c0d07a2be9d3ca64a332735"}, + {file = "wrapt-1.14.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40e7bc81c9e2b2734ea4bc1aceb8a8f0ceaac7c5299bc5d69e37c44d9081d43b"}, + {file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b9b7a708dd92306328117d8c4b62e2194d00c365f18eff11a9b53c6f923b01e3"}, + {file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6a9a25751acb379b466ff6be78a315e2b439d4c94c1e99cb7266d40a537995d3"}, + {file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:34aa51c45f28ba7f12accd624225e2b1e5a3a45206aa191f6f9aac931d9d56fe"}, + {file = "wrapt-1.14.1-cp39-cp39-win32.whl", hash = "sha256:dee0ce50c6a2dd9056c20db781e9c1cfd33e77d2d569f5d1d9321c641bb903d5"}, + {file = "wrapt-1.14.1-cp39-cp39-win_amd64.whl", hash = "sha256:dee60e1de1898bde3b238f18340eec6148986da0455d8ba7848d50470a7a32fb"}, + {file = "wrapt-1.14.1.tar.gz", hash = "sha256:380a85cf89e0e69b7cfbe2ea9f765f004ff419f34194018a6827ac0e3edfed4d"}, +] zipp = [] diff --git a/pyproject.toml b/pyproject.toml index e328f27c..1899c21b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -54,7 +54,7 @@ exclude = [] [tool.poetry.dependencies] python = ">=3.8,<4.0" -SQLAlchemy = ">=1.3.24,<1.4" +SQLAlchemy = ">=1.4" pyodbc = ">=4.0.34" packaging = "^21.3" turbodbc = {version = ">=4.5.4", optional = true} @@ -72,12 +72,12 @@ wheel = "^0.37.1" black = "^22.6.0" isort = "^5.10.1" pylint = "^2.14.5" -mypy = "^0.971" -pyupgrade = "^2.37.3" scriv = "^0.16.0" Sphinx = "^5.1.1" furo = "^2022.6.21" sphinx-copybutton = "^0.5.0" +mypy = "^0.982" +pyupgrade = "^3.0.0" [tool.poetry.extras] turbodbc = ["turbodbc"] @@ -87,8 +87,7 @@ turbodbc = ["turbodbc"] "exa.turbodbc" = "sqlalchemy_exasol.turbodbc:EXADialect_turbodbc" [tool.pytest.ini_options] -addopts= "--tb native -v -r fxX" -python_files= "test/*test_*.py" +addopts= "--tb native -v -r fxX --log-debug=sqlalchemy.engine --log-debug=sqlalchemy.pool" filterwarnings = [ "error::DeprecationWarning", "ignore::DeprecationWarning:sqlalchemy.testing.plugin.*", diff --git a/scripts/odbc.py b/scripts/odbc.py new file mode 100644 index 00000000..9df68a6e --- /dev/null +++ b/scripts/odbc.py @@ -0,0 +1,63 @@ +from __future__ import annotations + +import os +from contextlib import contextmanager +from pathlib import Path +from tempfile import TemporaryDirectory +from textwrap import dedent +from typing import ( + Iterable, + Iterator, + MutableMapping, +) + +from pyodbc import Connection + 
+ODBCINST_INI_TEMPLATE = dedent(
+    """
+    [ODBC]
+    #Trace = yes
+    #TraceFile =~/odbc.trace
+
+    [EXAODBC]
+    #Driver location will be appended in build environment:
+    DRIVER={driver}
+
+    """
+)
+
+
+def transaction(connection: Connection, sql_statements: Iterable[str]) -> None:
+    cur = connection.cursor()
+    for statement in sql_statements:
+        cur.execute(statement)
+    cur.commit()
+    cur.close()
+
+
+@contextmanager
+def environment(env_vars: dict[str, str]) -> Iterator[MutableMapping[str, str]]:
+    _env = os.environ.copy()
+    os.environ.update(env_vars)
+    try:
+        yield os.environ
+    finally:
+        # restore the original environment even if the body raised
+        os.environ.clear()
+        os.environ.update(_env)
+
+
+@contextmanager
+def temporary_odbc_config(config: str) -> Iterator[Path]:
+    with TemporaryDirectory() as tmp_dir:
+        config_dir = Path(tmp_dir) / "odbcconfig"
+        config_dir.mkdir(exist_ok=True)
+        config_file = config_dir / "odbcinst.ini"
+        with open(config_file, "w") as f:
+            f.write(config)
+        yield config_file
+
+
+@contextmanager
+def odbcconfig(driver: Path) -> Iterator[tuple[Path, MutableMapping[str, str]]]:
+    with temporary_odbc_config(ODBCINST_INI_TEMPLATE.format(driver=driver)) as cfg:
+        env_vars = {"ODBCSYSINI": f"{cfg.parent.resolve()}"}
+        with environment(env_vars) as env:
+            yield cfg, env
diff --git a/setup.cfg b/setup.cfg
index bb0aea60..b5b315ed 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -1,3 +1,7 @@
+[db]
+exasol-pyodbc=exa+pyodbc://sys:exasol@localhost:8888/TEST?CONNECTIONCALL=en_US.UTF-8&DRIVER=EXAODBC&SSLCertificate=SSL_VERIFY_NONE
+exasol-turbodbc=exa+turbodbc://sys:exasol@localhost:8888/TEST?CONNECTIONCALL=en_US.UTF-8&DRIVER=EXAODBC&SSLCertificate=SSL_VERIFY_NONE
+
 [sqla_testing]
 requirement_cls=sqlalchemy_exasol.requirements:Requirements
 profile_file=.profiles.txt
diff --git a/sqlalchemy_exasol/base.py b/sqlalchemy_exasol/base.py
index feb6c9ad..a93e506e 100644
--- a/sqlalchemy_exasol/base.py
+++ b/sqlalchemy_exasol/base.py
@@ -731,7 +731,7 @@ def get_lastrowid(self):
 def _get_schema(sql_compiler, dialect):
     """Get the schema while taking the translation-map and the de-normalization into account"""
     translate_map = sql_compiler.schema_translate_map
-    schema_dispatcher = translate_map.map_ if translate_map else {}
+    schema_dispatcher = translate_map if translate_map else {}
     schema = sql_compiler.statement.table.schema
     schema = (
         schema_dispatcher[schema] if schema in schema_dispatcher else schema
@@ -762,48 +762,52 @@ def pre_exec(self):
         Note: Parameter replacement is done for server versions < 4.1.8 or
         in case a delete query is executed.
         """
-        if self.isdelete or self.root_connection.dialect.server_version_info < (
-            4,
-            1,
-            8,
-        ):
-            db_query = self.unicode_statement
-            for i in range(1, len(self.parameters)):
-                db_query += ", (" + ", ".join(["?"] * len(self.parameters[i])) + ")"
-            for db_para in self.parameters:
-                for value in db_para:
-                    ident = "?"
-                    if value is None:
-                        db_query = db_query.replace(ident, "NULL", 1)
-                    elif isinstance(value, int):
-                        db_query = db_query.replace(ident, str(value), 1)
-                    elif isinstance(value, (float, Decimal)):
-                        db_query = db_query.replace(ident, str(float(value)), 1)
-                    elif isinstance(value, bool):
-                        db_query = db_query.replace(ident, "1" if value else "0", 1)
-                    elif isinstance(value, datetime):
-                        db_query = db_query.replace(
-                            ident,
-                            "to_timestamp('%s', 'YYYY-MM-DD HH24:MI:SS.FF6')"
-                            % value.strftime("%Y-%m-%d %H:%M:%S.%f"),
-                            1,
-                        )
-                    elif isinstance(value, date):
-                        db_query = db_query.replace(
-                            ident,
-                            "to_date('%s', 'YYYY-MM-DD')" % value.strftime("%Y-%m-%d"),
-                            1,
-                        )
-                    elif isinstance(value, bytes):
-                        db_query = db_query.replace(
-                            ident, "'%s'" % value.decode("UTF-8"), 1
-                        )
-                    elif isinstance(value, str):
-                        db_query = db_query.replace(ident, "'%s'" % value, 1)
-                    else:
-                        raise TypeError("Data type not supported: %s" % type(value))
-            self.statement = db_query
-            self.parameters = [[]]
+        server_version = self.root_connection.dialect.server_version_info
+        # FIXME: drop exasol version support < 4.1.0
+        # see https://github.com/exasol/sqlalchemy-exasol/pull/191#discussion_r942595818
+        skip_pre_exec = not self.isdelete and (
+            server_version is None or server_version >= (4, 1, 8)
+        )
+        if skip_pre_exec:
+            return
+
+        db_query = self.unicode_statement
+        for i in range(1, len(self.parameters)):
+            db_query += ", (" + ", ".join(["?"] * len(self.parameters[i])) + ")"
+        for db_para in self.parameters:
+            for value in db_para:
+                ident = "?"
+                if value is None:
+                    db_query = db_query.replace(ident, "NULL", 1)
+                elif isinstance(value, int):
+                    db_query = db_query.replace(ident, str(value), 1)
+                elif isinstance(value, (float, Decimal)):
+                    db_query = db_query.replace(ident, str(float(value)), 1)
+                elif isinstance(value, bool):
+                    db_query = db_query.replace(ident, "1" if value else "0", 1)
+                elif isinstance(value, datetime):
+                    db_query = db_query.replace(
+                        ident,
+                        "to_timestamp('%s', 'YYYY-MM-DD HH24:MI:SS.FF6')"
+                        % value.strftime("%Y-%m-%d %H:%M:%S.%f"),
+                        1,
+                    )
+                elif isinstance(value, date):
+                    db_query = db_query.replace(
+                        ident,
+                        "to_date('%s', 'YYYY-MM-DD')" % value.strftime("%Y-%m-%d"),
+                        1,
+                    )
+                elif isinstance(value, bytes):
+                    db_query = db_query.replace(
+                        ident, "'%s'" % value.decode("UTF-8"), 1
+                    )
+                elif isinstance(value, str):
+                    db_query = db_query.replace(ident, "'%s'" % value, 1)
+                else:
+                    raise TypeError("Data type not supported: %s" % type(value))
+        self.statement = db_query
+        self.parameters = [[]]

     def should_autocommit_text(self, statement):
         return AUTOCOMMIT_REGEXP.match(statement)
diff --git a/sqlalchemy_exasol/merge.py b/sqlalchemy_exasol/merge.py
index 50411d30..66b3ff7a 100644
--- a/sqlalchemy_exasol/merge.py
+++ b/sqlalchemy_exasol/merge.py
@@ -1,3 +1,8 @@
+# Refactoring notes
+# * Replace with list comprehension https://github.com/exasol/sqlalchemy-exasol/pull/191#discussion_r948769723
+# * Replace with dict comprehension https://github.com/exasol/sqlalchemy-exasol/pull/191#discussion_r948792860
+# * Replace with dict comprehension https://github.com/exasol/sqlalchemy-exasol/pull/191#discussion_r948796627
+# * Reconsider collecting tree elements https://github.com/exasol/sqlalchemy-exasol/pull/191#discussion_r948793529
 from sqlalchemy.ext.compiler import compiles
 from sqlalchemy.schema import Column
 from sqlalchemy.sql import crud
@@ -11,35 +16,36 @@


 class Merge(UpdateBase):
-    __visit_name__ = "merge"
+    class Delete(UpdateBase):
+        def __init__(self, where=None):
+            self.where = where
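+            # Compiled via the dedicated @compiles handler visit_merge_delete
+            # (registered at module level below); this keeps the standalone
+            # "WHEN MATCHED THEN DELETE" branch out of Merge.visit().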
+ def __init__(self, target_table, source_expr, on): - self._target_table = target_table + self.table = target_table self._source_expr = source_expr self._on = on - self._on_columns = [] - elements_to_check = list(on.get_children()) - for e in elements_to_check: - if isinstance(e, Column): - if e.table == self._target_table: - self._on_columns.append(e) - else: - elements_to_check.extend(e.get_children()) + self._on_columns = list( + element + for element in on.get_children() + if isinstance(element, Column) and element.table == self.table + ) self._merge_update_values = None self._update_where = None self._merge_insert_values = None self._insert_where = None self._merge_delete = False self._delete_where = None + self._delete = None - def _get_source_cols(self): - source_cols = {} - elements_to_check = list(self._source_expr.get_children()) - for e in elements_to_check: - if isinstance(e, ColumnClause): - source_cols[e.name] = e - return source_cols + def _source_columns(self): + elements = list( + element + for element in self._source_expr.get_children() + if isinstance(element, ColumnClause) + ) + return {element.name: element for element in elements} @_generative def update(self, values=None, where=None): @@ -47,17 +53,20 @@ def update(self, values=None, where=None): # present in the source expression but exclude columns part # of the MERGE ON condition if values is None: - source_cols = self._get_source_cols() values = {} - for c in self._target_table.c: - if c not in self._on_columns and c.name in source_cols: - values[c] = source_cols[c.name] - self._merge_update_values = ValuesBase(self._target_table, values, []) + src_columns = self._source_columns() + columns = ( + column + for column in self.table.c + if column not in self._on_columns and column.name in src_columns + ) + [values.update({column: src_columns[column.name]}) for column in columns] + self._merge_update_values = ValuesBase(self.table, values, []) if where is not None: if self._merge_delete: - self._delete_where = self._append_where(self._delete_where, where) + self._delete_where = Merge._append_where(self._delete_where, where) else: - self._update_where = self._append_where(self._update_where, where) + self._update_where = Merge._append_where(self._update_where, where) @_generative def insert(self, values=None, where=None): @@ -65,27 +74,68 @@ def insert(self, values=None, where=None): # present in the source expression but exclude columns part # of the MERGE ON condition if values is None: - source_cols = self._get_source_cols() values = {} - for c in self._target_table.c: - if c.name in source_cols: - values[c] = source_cols[c.name] - self._merge_insert_values = ValuesBase(self._target_table, values, []) + src_columns = self._source_columns() + columns = (column for column in self.table.c if column.name in src_columns) + [values.update({column: src_columns[column.name]}) for column in columns] + self._merge_insert_values = ValuesBase(self.table, values, []) if where is not None: - self._insert_where = self._append_where(self._insert_where, where) + self._insert_where = Merge._append_where(self._insert_where, where) @_generative def delete(self, where=None): self._merge_delete = True - if self._merge_update_values is None and where is not None: - self._delete_where = self._append_where(self._delete_where, where) + self.delete_clause = Merge.Delete(where) - def _append_where(self, where, addition): - if where is not None: - where = and_(where, addition) + @staticmethod + def _append_where(where, addition): + if where 
is None: + return addition + return and_(where, addition) + + def _is_merge_update(self): + return self._merge_update_values is not None + + def _is_merge_delete(self): + return not self._is_merge_update() and self._merge_delete + + def _is_merge_insert(self): + return self._merge_insert_values is not None + + def _compile_update(self, compiler): + columns = crud._get_crud_params(compiler, self._merge_update_values) + sql = "" + sql += "\nWHEN MATCHED THEN UPDATE SET " + sql += ", ".join(compiler.visit_column(c[0]) + "=" + c[1] for c in columns) + if self._merge_delete: + sql += "\nDELETE " + if self._delete_where is not None: + sql += " WHERE %s" % compiler.process(self._delete_where) else: - where = addition - return where + if self._update_where is not None: + sql += " WHERE %s" % compiler.process(self._update_where) + return sql + + def _compile_insert(self, compiler): + columns = crud._get_crud_params(compiler, self._merge_insert_values) + sql = "\nWHEN NOT MATCHED THEN INSERT " + sql += "(%s) " % ", ".join(compiler.visit_column(c[0]) for c in columns) + sql += "VALUES (%s) " % ", ".join(c[1] for c in columns) + if self._insert_where is not None: + sql += "WHERE %s" % compiler.process(self._insert_where) + return sql + + def visit(self, compiler): + sql = "MERGE INTO %s " % compiler.process(self.table, asfrom=True) + sql += "USING %s " % compiler.process(self._source_expr, asfrom=True) + sql += "ON ( %s ) " % compiler.process(self._on) + if self._is_merge_update(): + sql += self._compile_update(compiler) + if self._is_merge_delete(): + sql += compiler.process(self.delete_clause) + if self._is_merge_insert(): + sql += self._compile_insert(compiler) + return sql def merge(target_table, source_expr, on): @@ -93,33 +143,13 @@ def merge(target_table, source_expr, on): @compiles(Merge, "exasol") -def visit_merge(element, compiler, **kw): - msql = "MERGE INTO %s " % compiler.process(element._target_table, asfrom=True) - msql += "USING %s " % compiler.process(element._source_expr, asfrom=True) - msql += "ON ( %s ) " % compiler.process(element._on) - - if element._merge_update_values is not None: - cols = crud._get_crud_params(compiler, element._merge_update_values) - msql += "\nWHEN MATCHED THEN UPDATE SET " - msql += ", ".join(compiler.visit_column(c[0]) + "=" + c[1] for c in cols) - if element._merge_delete: - msql += "\nDELETE " - if element._delete_where is not None: - msql += " WHERE %s" % compiler.process(element._delete_where) - else: - if element._update_where is not None: - msql += " WHERE %s" % compiler.process(element._update_where) - else: - if element._merge_delete: - msql += "\nWHEN MATCHED THEN DELETE " - if element._delete_where is not None: - msql += "WHERE %s" % compiler.process(element._delete_where) - if element._merge_insert_values is not None: - cols = crud._get_crud_params(compiler, element._merge_insert_values) - msql += "\nWHEN NOT MATCHED THEN INSERT " - msql += "(%s) " % ", ".join(compiler.visit_column(c[0]) for c in cols) - msql += "VALUES (%s) " % ", ".join(c[1] for c in cols) - if element._insert_where is not None: - msql += "WHERE %s" % compiler.process(element._insert_where) - - return msql +def visit_merge(node, compiler, **_): + return node.visit(compiler) + + +@compiles(Merge.Delete, "exasol") +def visit_merge_delete(node, compiler, **_): + sql = "\nWHEN MATCHED THEN DELETE " + if node.where is not None: + sql += "WHERE %s" % compiler.process(node.where) + return sql diff --git a/sqlalchemy_exasol/pyodbc.py b/sqlalchemy_exasol/pyodbc.py index 
c243ee98..965ccebd 100644
--- a/sqlalchemy_exasol/pyodbc.py
+++ b/sqlalchemy_exasol/pyodbc.py
@@ -25,6 +25,7 @@


 class EXADialect_pyodbc(EXADialect, PyODBCConnector):
+    supports_statement_cache = False
     execution_ctx_cls = EXAExecutionContext

     driver_version = None
diff --git a/sqlalchemy_exasol/requirements.py b/sqlalchemy_exasol/requirements.py
index 8ee1897b..7ee139f9 100644
--- a/sqlalchemy_exasol/requirements.py
+++ b/sqlalchemy_exasol/requirements.py
@@ -198,3 +198,33 @@ def standalone_null_binds_whereclause(self):
         WHERE clause, in situations where it has to be typed.
         """
         return exclusions.closed()
+
+    @property
+    def binary_literals(self):
+        """target backend supports simple binary literals, e.g. an
+        expression like::
+
+            SELECT CAST('foo' AS BINARY)
+
+        Where ``BINARY`` is the type emitted from :class:`.LargeBinary`,
+        e.g. it could be ``BLOB`` or similar.
+
+        Basically fails on Oracle.
+
+        """
+        return skip_if(
+            BooleanPredicate(
+                True, """A binary type is not natively supported by the EXASOL DB"""
+            )
+        )
+
+    @property
+    def binary_comparisons(self):
+        """target database/driver can allow BLOB/BINARY fields to be compared
+        against a bound parameter value.
+        """
+        return skip_if(
+            BooleanPredicate(
+                True, """A binary type is not natively supported by the EXASOL DB"""
+            )
+        )
diff --git a/sqlalchemy_exasol/turbodbc.py b/sqlalchemy_exasol/turbodbc.py
index b9b0dffa..04ad095f 100644
--- a/sqlalchemy_exasol/turbodbc.py
+++ b/sqlalchemy_exasol/turbodbc.py
@@ -67,6 +67,7 @@ class EXADialect_turbodbc(EXADialect):
     driver = "turbodbc"
     driver_version = None
     server_version_info = None
+    supports_statement_cache = False

     supports_native_decimal = False
     supports_sane_multi_rowcount = False
diff --git a/test/__init__.py b/test/__init__.py
deleted file mode 100644
index 6684f9d6..00000000
--- a/test/__init__.py
+++ /dev/null
@@ -1,5 +0,0 @@
-"""
-Created on Aug 13, 2012
-
-@author: Blue Yonder GmbH
-"""
diff --git a/test/conftest.py b/test/integration/conftest.py
similarity index 100%
rename from test/conftest.py
rename to test/integration/conftest.py
diff --git a/test/integration/exasol/test_certificate.py b/test/integration/exasol/test_certificate.py
new file mode 100644
index 00000000..bf8a7f12
--- /dev/null
+++ b/test/integration/exasol/test_certificate.py
@@ -0,0 +1,44 @@
+import pytest
+import sqlalchemy.exc
+from sqlalchemy import (
+    create_engine,
+    testing,
+)
+from sqlalchemy.testing.fixtures import (
+    TestBase,
+    config,
+)
+
+
+class CertificateTest(TestBase):
+    @staticmethod
+    def remove_ssl_settings(url):
+        """Create an equivalent url without the ssl/tls settings."""
+        # Note:
+        # This implementation is not backwards compatible with SQLA < 1.4; for a
+        # backwards compatible solution see:
+        # * https://docs.sqlalchemy.org/en/14/changelog/migration_14.html#the-url-object-is-now-immutable
+        query = dict(url.query)
+        try:
+            del query["SSLCertificate"]
+        except KeyError:
+            # nothing to do
+            pass
+        return url.set(query=query)
+
+    @pytest.mark.skipif(
+        testing.db.dialect.server_version_info < (7, 1, 0),
+        reason="DB version(s) before 7.1.0 don't enforce ssl/tls",
+    )
+    def test_db_connection_fails_with_default_settings_for_self_signed_certificates(
+        self,
+    ):
+        url = self.remove_ssl_settings(config.db.url)
+
+        engine = create_engine(url)
+        with pytest.raises(sqlalchemy.exc.DBAPIError) as exec_info:
+            # we expect the connect call to fail, but want to close it in case it succeeds
+            with engine.connect() as conn:
+                pass
+
+        assert "self signed 
certificate" in f"{exec_info.value}" diff --git a/test/test_deadlock.py b/test/integration/exasol/test_deadlock.py similarity index 93% rename from test/test_deadlock.py rename to test/integration/exasol/test_deadlock.py index 88f5a961..61a54041 100644 --- a/test/test_deadlock.py +++ b/test/integration/exasol/test_deadlock.py @@ -3,8 +3,10 @@ import pytest import sqlalchemy.testing as testing -from sqlalchemy import create_engine -from sqlalchemy.engine.reflection import Inspector +from sqlalchemy import ( + create_engine, + inspect, +) from sqlalchemy.testing import ( config, fixtures, @@ -31,7 +33,7 @@ def create_transaction(self, url, con_name): def test_no_deadlock_for_get_table_names_without_fallback(self): def without_fallback(session2, schema, table): - dialect = Inspector(session2).dialect + dialect = inspect(session2).dialect dialect.get_table_names(session2, schema=schema, use_sql_fallback=False) self.run_deadlock_for_table(without_fallback) @@ -43,7 +45,7 @@ def without_fallback(session2, schema, table): ) def test_deadlock_for_get_table_names_with_fallback(self): def with_fallback(session2, schema, table): - dialect = Inspector(session2).dialect + dialect = inspect(session2).dialect dialect.get_table_names(session2, schema=schema, use_sql_fallback=True) with pytest.raises(Exception): @@ -55,14 +57,14 @@ def with_fallback(session2, schema, table): ) def test_no_deadlock_for_get_table_names_with_fallback(self): def with_fallback(session2, schema, table): - dialect = Inspector(session2).dialect + dialect = inspect(session2).dialect dialect.get_table_names(session2, schema=schema, use_sql_fallback=True) self.run_deadlock_for_table(with_fallback) def test_no_deadlock_for_get_columns_without_fallback(self): def without_fallback(session2, schema, table): - dialect = Inspector(session2).dialect + dialect = inspect(session2).dialect dialect.get_columns( session2, schema=schema, table_name=table, use_sql_fallback=False ) @@ -72,7 +74,7 @@ def without_fallback(session2, schema, table): def test_no_deadlock_for_get_columns_with_fallback(self): # TODO: Doesnt produce a deadlock anymore since last commit? 
def with_fallback(session2, schema, table): - dialect = Inspector(session2).dialect + dialect = inspect(session2).dialect dialect.get_columns( session2, schema=schema, table_name=table, use_sql_fallback=True ) @@ -81,7 +83,7 @@ def with_fallback(session2, schema, table): def test_no_deadlock_for_get_pk_constraint_without_fallback(self): def without_fallback(session2, schema, table): - dialect = Inspector(session2).dialect + dialect = inspect(session2).dialect dialect.get_pk_constraint( session2, table_name=table, schema=schema, use_sql_fallback=False ) @@ -90,7 +92,7 @@ def without_fallback(session2, schema, table): def test_no_deadlock_for_get_pk_constraint_with_fallback(self): def with_fallback(session2, schema, table): - dialect = Inspector(session2).dialect + dialect = inspect(session2).dialect dialect.get_pk_constraint( session2, table_name=table, schema=schema, use_sql_fallback=True ) @@ -99,7 +101,7 @@ def with_fallback(session2, schema, table): def test_no_deadlock_for_get_foreign_keys_without_fallback(self): def without_fallback(session2, schema, table): - dialect = Inspector(session2).dialect + dialect = inspect(session2).dialect dialect.get_foreign_keys( session2, table_name=table, schema=schema, use_sql_fallback=False ) @@ -108,7 +110,7 @@ def without_fallback(session2, schema, table): def test_no_deadlock_for_get_foreign_keys_with_fallback(self): def with_fallback(session2, schema, table): - dialect = Inspector(session2).dialect + dialect = inspect(session2).dialect dialect.get_foreign_keys( session2, table_name=table, schema=schema, use_sql_fallback=True ) @@ -118,7 +120,7 @@ def with_fallback(session2, schema, table): def test_no_deadlock_for_get_view_names_without_fallback(self): # TODO: think of other scenarios where metadata deadlocks with view could happen def without_fallback(session2, schema, table): - dialect = Inspector(session2).dialect + dialect = inspect(session2).dialect dialect.get_view_names( session2, table_name=table, schema=schema, use_sql_fallback=False ) @@ -128,7 +130,7 @@ def without_fallback(session2, schema, table): def test_no_deadlock_for_get_view_names_with_fallback(self): # TODO: think of other scenarios where metadata deadlocks with view could happen def with_fallback(session2, schema, table): - dialect = Inspector(session2).dialect + dialect = inspect(session2).dialect dialect.get_view_names( session2, table_name=table, schema=schema, use_sql_fallback=True ) diff --git a/test/test_exadialect_pyodbc.py b/test/integration/exasol/test_exadialect_pyodbc.py similarity index 100% rename from test/test_exadialect_pyodbc.py rename to test/integration/exasol/test_exadialect_pyodbc.py diff --git a/test/test_exadialect_turbodbc.py b/test/integration/exasol/test_exadialect_turbodbc.py similarity index 100% rename from test/test_exadialect_turbodbc.py rename to test/integration/exasol/test_exadialect_turbodbc.py diff --git a/test/test_exasol.py b/test/integration/exasol/test_exasol.py similarity index 100% rename from test/test_exasol.py rename to test/integration/exasol/test_exasol.py diff --git a/test/test_get_metadata_functions.py b/test/integration/exasol/test_get_metadata_functions.py similarity index 94% rename from test/test_get_metadata_functions.py rename to test/integration/exasol/test_get_metadata_functions.py index 9c40d50b..29ac5389 100644 --- a/test/test_get_metadata_functions.py +++ b/test/integration/exasol/test_get_metadata_functions.py @@ -1,6 +1,8 @@ import pytest -from sqlalchemy import create_engine -from sqlalchemy.engine.reflection 
import Inspector +from sqlalchemy import ( + create_engine, + inspect, +) from sqlalchemy.engine.url import URL from sqlalchemy.sql.sqltypes import ( INTEGER, @@ -82,7 +84,7 @@ def generate_url_with_database_name(cls, connection, new_database_name): database_url = config.db_url new_args = database_url.translate_connect_args() new_args["database"] = new_database_name - new_database_url = URL( + new_database_url = URL.create( drivername=database_url.drivername, query=database_url.query, **new_args ) return new_database_url @@ -100,7 +102,7 @@ def create_engine_with_database_name(cls, connection, new_database_name): ) def test_get_schema_names(self, engine_name, use_sql_fallback): with self.engine_map[engine_name].begin() as c: - dialect = Inspector(c).dialect + dialect = inspect(c).dialect schema_names = dialect.get_schema_names( connection=c, use_sql_fallback=use_sql_fallback ) @@ -112,7 +114,7 @@ def test_get_schema_names(self, engine_name, use_sql_fallback): ) def test_compare_get_schema_names_for_sql_and_odbc(self, engine_name): with self.engine_map[engine_name].begin() as c: - dialect = Inspector(c).dialect + dialect = inspect(c).dialect schema_names_fallback = dialect.get_schema_names( connection=c, use_sql_fallback=True ) @@ -126,7 +128,7 @@ def test_compare_get_schema_names_for_sql_and_odbc(self, engine_name): ) def test_get_table_names(self, use_sql_fallback, engine_name): with self.engine_map[engine_name].begin() as c: - dialect = Inspector(c).dialect + dialect = inspect(c).dialect table_names = dialect.get_table_names( connection=c, schema=self.schema, use_sql_fallback=use_sql_fallback ) @@ -141,7 +143,7 @@ def test_compare_get_table_names_for_sql_and_odbc(self, schema, engine_name): with self.engine_map[engine_name].begin() as c: if schema is None: c.execute("OPEN SCHEMA %s" % self.schema) - dialect = Inspector(c).dialect + dialect = inspect(c).dialect table_names_fallback = dialect.get_table_names( connection=c, schema=schema, use_sql_fallback=True ) @@ -155,7 +157,7 @@ def test_compare_get_table_names_for_sql_and_odbc(self, schema, engine_name): ) def test_has_table_table_exists(self, use_sql_fallback, engine_name): with self.engine_map[engine_name].begin() as c: - dialect = Inspector(c).dialect + dialect = inspect(c).dialect has_table = dialect.has_table( connection=c, schema=self.schema, @@ -171,7 +173,7 @@ def test_has_table_table_exists(self, use_sql_fallback, engine_name): ) def test_has_table_table_exists_not(self, use_sql_fallback, engine_name): with self.engine_map[engine_name].begin() as c: - dialect = Inspector(c).dialect + dialect = inspect(c).dialect has_table = dialect.has_table( connection=c, schema=self.schema, @@ -189,7 +191,7 @@ def test_has_table_table_exists_not(self, use_sql_fallback, engine_name): ) def test_compare_has_table_for_sql_and_odbc(self, schema, engine_name): with self.engine_map[engine_name].begin() as c: - dialect = Inspector(c).dialect + dialect = inspect(c).dialect has_table_fallback = dialect.has_table( connection=c, schema=schema, use_sql_fallback=True, table_name="t" ) @@ -207,7 +209,7 @@ def test_compare_has_table_for_sql_and_odbc(self, schema, engine_name): ) def test_get_view_names(self, use_sql_fallback, engine_name): with self.engine_map[engine_name].begin() as c: - dialect = Inspector(c).dialect + dialect = inspect(c).dialect view_names = dialect.get_view_names( connection=c, schema=self.schema, use_sql_fallback=use_sql_fallback ) @@ -220,7 +222,7 @@ def test_get_view_names(self, use_sql_fallback, engine_name): ) def 
test_get_view_names_for_sys(self, use_sql_fallback, engine_name): with self.engine_map[engine_name].begin() as c: - dialect = Inspector(c).dialect + dialect = inspect(c).dialect view_names = dialect.get_view_names( connection=c, schema="sys", use_sql_fallback=use_sql_fallback ) @@ -233,7 +235,7 @@ def test_get_view_names_for_sys(self, use_sql_fallback, engine_name): ) def test_get_view_definition(self, use_sql_fallback, engine_name): with self.engine_map[engine_name].begin() as c: - dialect = Inspector(c).dialect + dialect = inspect(c).dialect view_definition = dialect.get_view_definition( connection=c, schema=self.schema, @@ -249,7 +251,7 @@ def test_get_view_definition(self, use_sql_fallback, engine_name): ) def test_get_view_definition_view_name_none(self, use_sql_fallback, engine_name): with self.engine_map[engine_name].begin() as c: - dialect = Inspector(c).dialect + dialect = inspect(c).dialect view_definition = dialect.get_view_definition( connection=c, schema=self.schema, @@ -265,7 +267,7 @@ def test_get_view_definition_view_name_none(self, use_sql_fallback, engine_name) ) def test_compare_get_view_names_for_sql_and_odbc(self, schema, engine_name): with self.engine_map[engine_name].begin() as c: - dialect = Inspector(c).dialect + dialect = inspect(c).dialect c.execute("OPEN SCHEMA %s" % self.schema) view_names_fallback = dialect.get_view_names( connection=c, schema=schema, use_sql_fallback=True @@ -283,7 +285,7 @@ def test_compare_get_view_definition_for_sql_and_odbc(self, schema, engine_name) if schema is None: c.execute("OPEN SCHEMA %s" % self.schema) view_name = "v" - dialect = Inspector(c).dialect + dialect = inspect(c).dialect view_definition_fallback = dialect.get_view_definition( connection=c, view_name=view_name, schema=schema, use_sql_fallback=True ) @@ -300,7 +302,7 @@ def test_compare_get_view_definition_for_sql_and_odbc(self, schema, engine_name) ) def test_compare_get_columns_for_sql_and_odbc(self, schema, table, engine_name): with self.engine_map[engine_name].begin() as c: - dialect = Inspector(c).dialect + dialect = inspect(c).dialect if schema is None: c.execute("OPEN SCHEMA %s" % self.schema) columns_fallback = dialect.get_columns( @@ -322,7 +324,7 @@ def test_compare_get_columns_none_table_for_sql_and_odbc(self, schema, engine_na with self.engine_map[engine_name].begin() as c: if schema is None: c.execute("OPEN SCHEMA %s" % self.schema) - dialect = Inspector(c).dialect + dialect = inspect(c).dialect table = None columns_fallback = dialect.get_columns( connection=c, table_name=table, schema=schema, use_sql_fallback=True @@ -347,7 +349,7 @@ def make_columns_comparable( ) def test_get_columns(self, use_sql_fallback, engine_name): with self.engine_map[engine_name].begin() as c: - dialect = Inspector(c).dialect + dialect = inspect(c).dialect columns = dialect.get_columns( connection=c, schema=self.schema, @@ -396,7 +398,7 @@ def test_get_columns(self, use_sql_fallback, engine_name): ) def test_get_columns_table_name_none(self, use_sql_fallback, engine_name): with self.engine_map[engine_name].begin() as c: - dialect = Inspector(c).dialect + dialect = inspect(c).dialect columns = dialect.get_columns( connection=c, schema=self.schema, @@ -417,7 +419,7 @@ def test_compare_get_pk_constraint_for_sql_and_odbc( with self.engine_map[engine_name].begin() as c: if schema is None: c.execute("OPEN SCHEMA %s" % self.schema) - dialect = Inspector(c).dialect + dialect = inspect(c).dialect pk_constraint_fallback = dialect.get_pk_constraint( connection=c, table_name=table, 
schema=schema, use_sql_fallback=True ) @@ -433,7 +435,7 @@ def test_compare_get_pk_constraint_for_sql_and_odbc( ) def test_get_pk_constraint(self, use_sql_fallback, engine_name): with self.engine_map[engine_name].begin() as c: - dialect = Inspector(c).dialect + dialect = inspect(c).dialect pk_constraint = dialect.get_pk_constraint( connection=c, schema=self.schema, @@ -452,7 +454,7 @@ def test_get_pk_constraint(self, use_sql_fallback, engine_name): ) def test_get_pk_constraint_table_name_none(self, use_sql_fallback, engine_name): with self.engine_map[engine_name].begin() as c: - dialect = Inspector(c).dialect + dialect = inspect(c).dialect pk_constraint = dialect.get_pk_constraint( connection=c, schema=self.schema, @@ -473,7 +475,7 @@ def test_compare_get_foreign_keys_for_sql_and_odbc( with self.engine_map[engine_name].begin() as c: if schema is None: c.execute("OPEN SCHEMA %s" % self.schema_2) - dialect = Inspector(c).dialect + dialect = inspect(c).dialect foreign_keys_fallback = dialect.get_foreign_keys( connection=c, table_name=table, schema=schema, use_sql_fallback=True ) @@ -489,7 +491,7 @@ def test_compare_get_foreign_keys_for_sql_and_odbc( ) def test_get_foreign_keys(self, use_sql_fallback, engine_name): with self.engine_map[engine_name].begin() as c: - dialect = Inspector(c).dialect + dialect = inspect(c).dialect foreign_keys = dialect.get_foreign_keys( connection=c, schema=self.schema, @@ -515,7 +517,7 @@ def test_get_foreign_keys(self, use_sql_fallback, engine_name): ) def test_get_foreign_keys_table_name_none(self, use_sql_fallback, engine_name): with self.engine_map[engine_name].begin() as c: - dialect = Inspector(c).dialect + dialect = inspect(c).dialect foreign_keys = dialect.get_foreign_keys( connection=c, schema=self.schema, diff --git a/test/test_large_metadata.py b/test/integration/exasol/test_large_metadata.py similarity index 100% rename from test/test_large_metadata.py rename to test/integration/exasol/test_large_metadata.py diff --git a/test/test_regression.py b/test/integration/exasol/test_regression.py similarity index 100% rename from test/test_regression.py rename to test/integration/exasol/test_regression.py diff --git a/test/test_update.py b/test/integration/exasol/test_update.py similarity index 92% rename from test/test_update.py rename to test/integration/exasol/test_update.py index 32c90685..0adca893 100644 --- a/test/test_update.py +++ b/test/integration/exasol/test_update.py @@ -1,5 +1,4 @@ -import unittest - +import pytest from sqlalchemy import * from sqlalchemy import testing from sqlalchemy.testing import ( @@ -70,12 +69,12 @@ def fixtures(cls): ) +@pytest.mark.skipif( + testing.db.dialect.driver == "turbodbc", reason="not supported by turbodbc" +) class UpdateTest(_UpdateTestBase, fixtures.TablesTest): __backend__ = True - @unittest.skipIf( - testing.db.dialect.driver == "turbodbc", "not supported by turbodbc" - ) def test_update_simple(self): """test simple update and assert that exasol returns the right rowcount""" users = self.tables.users @@ -86,9 +85,6 @@ def test_update_simple(self): assert result.rowcount == 1 self._assert_users(users, expected) - @unittest.skipIf( - testing.db.dialect.driver == "turbodbc", "not supported by turbodbc" - ) def test_update_simple_multiple_rows_rowcount(self): """test simple update and assert that exasol returns the right rowcount""" users = self.tables.users @@ -99,9 +95,6 @@ def test_update_simple_multiple_rows_rowcount(self): assert result.rowcount == 2 self._assert_users(users, expected) - @unittest.skipIf( - 
testing.db.dialect.driver == "turbodbc", "not supported by turbodbc" - ) def test_update_executemany(self): """test that update with executemany work as well, but rowcount is undefined for executemany updates""" diff --git a/test/test_suite.py b/test/integration/sqlalchemy/test_suite.py similarity index 100% rename from test/test_suite.py rename to test/integration/sqlalchemy/test_suite.py diff --git a/test/test_certificate.py b/test/test_certificate.py deleted file mode 100644 index 27fb6da9..00000000 --- a/test/test_certificate.py +++ /dev/null @@ -1,26 +0,0 @@ -import copy - -import pytest -import sqlalchemy.exc -from sqlalchemy import create_engine -from sqlalchemy.testing.fixtures import ( - TestBase, - config, -) - - -class CertificateTest(TestBase): - def test_db_connection_fails_with_default_settings_for_self_signed_certificates( - self, - ): - url = copy.deepcopy(config.db.url) - if "SSLCertificate" in url.query: - del url.query["SSLCertificate"] - - engine = create_engine(url) - with pytest.raises(sqlalchemy.exc.DBAPIError) as exec_info: - # we expect the connect call to fail, but want to close it in case it succeeds - with engine.connect() as conn: - pass - - assert "self signed certificate" in f"{exec_info.value}"