diff --git a/.github/workflows/ci_cd.yml b/.github/workflows/ci_cd.yml
new file mode 100644
index 0000000..1764120
--- /dev/null
+++ b/.github/workflows/ci_cd.yml
@@ -0,0 +1,148 @@
+name: Push commit
+on:
+ release:
+ types: [ created ]
+ push:
+ branches:
+ - '*'
+
+permissions:
+ contents: write
+ pages: write
+ id-token: write
+
+jobs:
+ ruff:
+ runs-on: ubuntu-22.04
+ steps:
+ - uses: actions/checkout@v4
+ - name: Set up Python
+ uses: actions/setup-python@v5
+ with:
+ python-version: '3.11'
+ - name: Run ruff
+ run: |
+ pip install ruff==0.5.1
+ ruff check -q
+
+ build:
+ runs-on: ubuntu-22.04
+ needs: [ ruff ]
+ steps:
+ - uses: actions/checkout@v4
+ - uses: actions/setup-python@v5
+ with:
+ python-version: '3.11'
+ - name: Install poetry
+ uses: abatilo/actions-poetry@v2
+ with:
+ poetry-version: 1.8.4
+ - name: Build
+ run: |
+ if [[ '${{ github.event_name }}' = 'release' ]]; then
+ sed -i -e "s/0.dev.0/$GITHUB_REF_NAME/g" pyproject.toml
+ fi
+ poetry build
+ mkdir ripley_build
+ mv ./dist ./ripley_build
+ - name: Upload ripley package
+ uses: actions/upload-artifact@v4
+ with:
+ name: dist
+ path: ripley_build
+ retention-days: 90
+ - name: Upload pyproject.toml
+ uses: actions/upload-artifact@v4
+ with:
+ name: pyproject.toml
+ path: pyproject.toml
+ retention-days: 90
+ - name: Upload README.md
+ uses: actions/upload-artifact@v4
+ with:
+ name: README.md
+ path: README.md
+ retention-days: 90
+
+ tests:
+ runs-on: ubuntu-22.04
+    permissions: { contents: read, checks: write, pull-requests: write }
+ needs: [ build ]
+ strategy:
+ fail-fast: false
+ matrix:
+ python-version: [ "3.8", "3.9", "3.10", "3.11", "3.12" ]
+ steps:
+ - uses: actions/checkout@v4
+ - uses: actions/setup-python@v5
+ with:
+ python-version: ${{ matrix.python-version }}
+ - name: Install poetry
+ uses: abatilo/actions-poetry@v2
+ with:
+ poetry-version: 1.8.4
+ - name: Download ripley package
+ uses: actions/download-artifact@v4
+ with:
+ name: dist
+ - uses: hoverkraft-tech/compose-action@v2.0.2
+ with:
+ services: |
+ clickhouse
+ s3
+ up-flags: "-d"
+ - name: Install test dependencies, ripley and run tests
+ run: |
+ python -m pip install --upgrade pip setuptools wheel
+ rm -r ripley
+ poetry config virtualenvs.create false
+ poetry lock --no-update
+ poetry install --only=dev
+ pip install --no-index -f ./dist ripley
+ pip install pytest-cov
+ pytest --doctest-modules --junitxml=test-results-${{ matrix.python-version }}.xml
+ - name: Upload pytest results
+ uses: actions/upload-artifact@v4
+ with:
+ name: test-results-${{ matrix.python-version }}.xml
+ path: test-results-${{ matrix.python-version }}.xml
+ retention-days: 90
+ if: always()
+ - name: Download test report
+ uses: actions/download-artifact@v4
+ if: always()
+ with:
+ name: test-results-${{ matrix.python-version }}.xml
+ - name: Publish Test Results
+        uses: EnricoMi/publish-unit-test-result-action@v2
+ if: always()
+ with:
+ files: |
+ test-results-${{ matrix.python-version }}.xml
+
+ publish_to_PyPi:
+ runs-on: ubuntu-22.04
+ needs: [ tests ]
+ if: github.event_name == 'release' && github.event.action == 'created'
+ steps:
+ - uses: actions/setup-python@v5
+ - name: Install poetry
+ uses: abatilo/actions-poetry@v2
+ with:
+ poetry-version: 1.8.4
+ - name: Download ripley package
+ uses: actions/download-artifact@v4
+ with:
+ name: dist
+ - name: Download pyproject.toml
+ uses: actions/download-artifact@v4
+ with:
+ name: pyproject.toml
+ - name: Download README.md
+ uses: actions/download-artifact@v4
+ with:
+ name: README.md
+ - name: Publish ripley package
+ run: |
+ poetry config pypi-token.pypi ${{ secrets.pypi_password }}
+ poetry publish
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..58df497
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,164 @@
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+*.py,cover
+.hypothesis/
+.pytest_cache/
+cover/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+db.sqlite3-journal
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+.pybuilder/
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# IPython
+profile_default/
+ipython_config.py
+
+# pyenv
+# For a library or package, you might want to ignore these files since the code is
+# intended to run in multiple environments; otherwise, check them in:
+# .python-version
+
+# pipenv
+# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+# However, in case of collaboration, if having platform-specific dependencies or dependencies
+# having no cross-platform support, pipenv may install dependencies that don't work, or not
+# install all needed dependencies.
+#Pipfile.lock
+
+# poetry
+# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
+# This is especially recommended for binary packages to ensure reproducibility, and is more
+# commonly ignored for libraries.
+# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
+#poetry.lock
+
+# pdm
+# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
+#pdm.lock
+# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
+# in version control.
+# https://pdm.fming.dev/latest/usage/project/#working-with-version-control
+.pdm.toml
+.pdm-python
+.pdm-build/
+
+# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
+__pypackages__/
+
+# Celery stuff
+celerybeat-schedule
+celerybeat.pid
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.env
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
+
+# pytype static type analyzer
+.pytype/
+
+# Cython debug symbols
+cython_debug/
+
+# PyCharm
+# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
+# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
+# and can be added to the global gitignore or merged into this file. For a more nuclear
+# option (not recommended) you can uncomment the following to ignore the entire idea folder.
+#.idea/
+
+test-results*
\ No newline at end of file
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..03d2e3c
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2024 Danila Ganchar
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/README.md b/README.md
new file mode 100755
index 0000000..3a535ad
--- /dev/null
+++ b/README.md
@@ -0,0 +1,15 @@
+# Ripley
+
+**Ripley** provides a simple interface for routine db operations and data maintenance. The main idea:
+
+- no dependencies
+- no error handlers
+- no package errors
+- isolation from the database driver
+
+Implementations: [Clickhouse](https://clickhouse.com/)
+
+
+
+
+[![Python versions](https://img.shields.io/badge/Package-Info-blue?logo=hackthebox)](https://d-ganchar.github.io/ripley.html)
\ No newline at end of file
diff --git a/docker-compose.yml b/docker-compose.yml
new file mode 100755
index 0000000..7f72f87
--- /dev/null
+++ b/docker-compose.yml
@@ -0,0 +1,27 @@
+version: '3.8'
+
+services:
+ clickhouse:
+ image: clickhouse/clickhouse-server:23.4.2.11-alpine
+ container_name: ripley_clickhouse
+ network_mode: host
+ logging:
+ driver: none
+ healthcheck:
+ test: wget --no-verbose --tries=1 --spider localhost:8123/ping || exit 1
+ interval: 2s
+ timeout: 2s
+ retries: 16
+
+ s3:
+ image: quay.io/minio/minio:RELEASE.2024-10-13T13-34-11Z
+ network_mode: host
+ logging:
+ driver: none
+ command:
+ - server
+ - --address=localhost:9001
+ - /data
+ environment:
+ - MINIO_ROOT_USER=ripley_key
+ - MINIO_ROOT_PASSWORD=ripley_secret
diff --git a/poetry.lock b/poetry.lock
new file mode 100644
index 0000000..12bb82c
--- /dev/null
+++ b/poetry.lock
@@ -0,0 +1,775 @@
+# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand.
+
+[[package]]
+name = "backports-zoneinfo"
+version = "0.2.1"
+description = "Backport of the standard library zoneinfo module"
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "backports.zoneinfo-0.2.1-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:da6013fd84a690242c310d77ddb8441a559e9cb3d3d59ebac9aca1a57b2e18bc"},
+ {file = "backports.zoneinfo-0.2.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:89a48c0d158a3cc3f654da4c2de1ceba85263fafb861b98b59040a5086259722"},
+ {file = "backports.zoneinfo-0.2.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:1c5742112073a563c81f786e77514969acb58649bcdf6cdf0b4ed31a348d4546"},
+ {file = "backports.zoneinfo-0.2.1-cp36-cp36m-win32.whl", hash = "sha256:e8236383a20872c0cdf5a62b554b27538db7fa1bbec52429d8d106effbaeca08"},
+ {file = "backports.zoneinfo-0.2.1-cp36-cp36m-win_amd64.whl", hash = "sha256:8439c030a11780786a2002261569bdf362264f605dfa4d65090b64b05c9f79a7"},
+ {file = "backports.zoneinfo-0.2.1-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:f04e857b59d9d1ccc39ce2da1021d196e47234873820cbeaad210724b1ee28ac"},
+ {file = "backports.zoneinfo-0.2.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:17746bd546106fa389c51dbea67c8b7c8f0d14b5526a579ca6ccf5ed72c526cf"},
+ {file = "backports.zoneinfo-0.2.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:5c144945a7752ca544b4b78c8c41544cdfaf9786f25fe5ffb10e838e19a27570"},
+ {file = "backports.zoneinfo-0.2.1-cp37-cp37m-win32.whl", hash = "sha256:e55b384612d93be96506932a786bbcde5a2db7a9e6a4bb4bffe8b733f5b9036b"},
+ {file = "backports.zoneinfo-0.2.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a76b38c52400b762e48131494ba26be363491ac4f9a04c1b7e92483d169f6582"},
+ {file = "backports.zoneinfo-0.2.1-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:8961c0f32cd0336fb8e8ead11a1f8cd99ec07145ec2931122faaac1c8f7fd987"},
+ {file = "backports.zoneinfo-0.2.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:e81b76cace8eda1fca50e345242ba977f9be6ae3945af8d46326d776b4cf78d1"},
+ {file = "backports.zoneinfo-0.2.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7b0a64cda4145548fed9efc10322770f929b944ce5cee6c0dfe0c87bf4c0c8c9"},
+ {file = "backports.zoneinfo-0.2.1-cp38-cp38-win32.whl", hash = "sha256:1b13e654a55cd45672cb54ed12148cd33628f672548f373963b0bff67b217328"},
+ {file = "backports.zoneinfo-0.2.1-cp38-cp38-win_amd64.whl", hash = "sha256:4a0f800587060bf8880f954dbef70de6c11bbe59c673c3d818921f042f9954a6"},
+ {file = "backports.zoneinfo-0.2.1.tar.gz", hash = "sha256:fadbfe37f74051d024037f223b8e001611eac868b5c5b06144ef4d8b799862f2"},
+]
+
+[package.extras]
+tzdata = ["tzdata"]
+
+[[package]]
+name = "boto3"
+version = "1.35.43"
+description = "The AWS SDK for Python"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "boto3-1.35.43-py3-none-any.whl", hash = "sha256:e6a50a0599f75b21de0de1a551a0564793d25b304fa623e4052e527b268de734"},
+ {file = "boto3-1.35.43.tar.gz", hash = "sha256:0197f460632804577aa78b2f6daf7b823bffa9d4d67a5cebb179efff0fe9631b"},
+]
+
+[package.dependencies]
+botocore = ">=1.35.43,<1.36.0"
+jmespath = ">=0.7.1,<2.0.0"
+s3transfer = ">=0.10.0,<0.11.0"
+
+[package.extras]
+crt = ["botocore[crt] (>=1.21.0,<2.0a0)"]
+
+[[package]]
+name = "botocore"
+version = "1.35.43"
+description = "Low-level, data-driven core of boto 3."
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "botocore-1.35.43-py3-none-any.whl", hash = "sha256:7cfdee9117617da97daaf259dd8484bcdc259c59eb7d1ce7db9ecf8506b7d36c"},
+ {file = "botocore-1.35.43.tar.gz", hash = "sha256:04539b85ade060601a3023cacb538fc17aad8c059a5a2e18fe4bc5d0d91fbd72"},
+]
+
+[package.dependencies]
+jmespath = ">=0.7.1,<2.0.0"
+python-dateutil = ">=2.1,<3.0.0"
+urllib3 = [
+ {version = ">=1.25.4,<1.27", markers = "python_version < \"3.10\""},
+ {version = ">=1.25.4,<2.2.0 || >2.2.0,<3", markers = "python_version >= \"3.10\""},
+]
+
+[package.extras]
+crt = ["awscrt (==0.22.0)"]
+
+[[package]]
+name = "build"
+version = "1.2.2"
+description = "A simple, correct Python build frontend"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "build-1.2.2-py3-none-any.whl", hash = "sha256:277ccc71619d98afdd841a0e96ac9fe1593b823af481d3b0cea748e8894e0613"},
+ {file = "build-1.2.2.tar.gz", hash = "sha256:119b2fb462adef986483438377a13b2f42064a2a3a4161f24a0cca698a07ac8c"},
+]
+
+[package.dependencies]
+colorama = {version = "*", markers = "os_name == \"nt\""}
+importlib-metadata = {version = ">=4.6", markers = "python_full_version < \"3.10.2\""}
+packaging = ">=19.1"
+pyproject_hooks = "*"
+tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
+
+[package.extras]
+docs = ["furo (>=2023.08.17)", "sphinx (>=7.0,<8.0)", "sphinx-argparse-cli (>=1.5)", "sphinx-autodoc-typehints (>=1.10)", "sphinx-issues (>=3.0.0)"]
+test = ["build[uv,virtualenv]", "filelock (>=3)", "pytest (>=6.2.4)", "pytest-cov (>=2.12)", "pytest-mock (>=2)", "pytest-rerunfailures (>=9.1)", "pytest-xdist (>=1.34)", "setuptools (>=42.0.0)", "setuptools (>=56.0.0)", "setuptools (>=56.0.0)", "setuptools (>=67.8.0)", "wheel (>=0.36.0)"]
+typing = ["build[uv]", "importlib-metadata (>=5.1)", "mypy (>=1.9.0,<1.10.0)", "tomli", "typing-extensions (>=3.7.4.3)"]
+uv = ["uv (>=0.1.18)"]
+virtualenv = ["virtualenv (>=20.0.35)"]
+
+[[package]]
+name = "clickhouse-driver"
+version = "0.2.8"
+description = "Python driver with native interface for ClickHouse"
+optional = false
+python-versions = "<4,>=3.7"
+files = [
+ {file = "clickhouse-driver-0.2.8.tar.gz", hash = "sha256:844b3080e558acbacd42ee569ec83ca7aaa3728f7077b9314c8d09aaa393d752"},
+ {file = "clickhouse_driver-0.2.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3a3a708e020ed2df59e424631f1822ffef4353912fcee143f3b7fc34e866621d"},
+ {file = "clickhouse_driver-0.2.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d258d3c3ac0f03527e295eeaf3cebb0a976bc643f6817ccd1d0d71ce970641b4"},
+ {file = "clickhouse_driver-0.2.8-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f63fb64a55dea29ed6a7d1d6805ebc95c37108c8a36677bc045d904ad600828"},
+ {file = "clickhouse_driver-0.2.8-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1b16d5dbd53fe32a99d3c4ab6c478c8aa9ae02aec5a2bd2f24180b0b4c03e1a5"},
+ {file = "clickhouse_driver-0.2.8-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ad2e1850ce91301ae203bc555fb83272dfebb09ad4df99db38c608d45fc22fa4"},
+ {file = "clickhouse_driver-0.2.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ae9239f61a18050164185ec0a3e92469d084377a66ae033cc6b4efa15922867"},
+ {file = "clickhouse_driver-0.2.8-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b8f222f2577bf304e86eec73dbca9c19d7daa6abcafc0bef68bbf31dd461890b"},
+ {file = "clickhouse_driver-0.2.8-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:451ac3de1191531d030751b05f122219b93b3c509e781fad81c2c91f0e9256b6"},
+ {file = "clickhouse_driver-0.2.8-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5a2c4fea88e91f1d5217b760ffea84631e647d8db2265b821cbe7b0e015c7807"},
+ {file = "clickhouse_driver-0.2.8-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:19825a3044c48ab65dc6659eb9763e2f0821887bdd9ee14a2f9ae8c539281ebf"},
+ {file = "clickhouse_driver-0.2.8-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ae13044a10015225297868658a6f1843c2e34b9fcaa6268880e25c4fca9f3c4d"},
+ {file = "clickhouse_driver-0.2.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:548a77efb86012800e76db6d45b3dcffea9a1a26fa3d5fd42021298f0b9a6f16"},
+ {file = "clickhouse_driver-0.2.8-cp310-cp310-win32.whl", hash = "sha256:ebe4328eaaf937365114b5bab5626600ee57e57d4d099ba2ddbae48c2493f73d"},
+ {file = "clickhouse_driver-0.2.8-cp310-cp310-win_amd64.whl", hash = "sha256:7beaeb4d7e6c3aba7e02375eeca85b20cc8e54dc31fcdb25d3c4308f2cd9465f"},
+ {file = "clickhouse_driver-0.2.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8e06ef6bb701c8e42a9c686d77ad30805cf431bb79fa8fe0f4d3dee819e9a12c"},
+ {file = "clickhouse_driver-0.2.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4afbcfa557419ed1783ecde3abbee1134e09b26c3ab0ada5b2118ae587357c2b"},
+ {file = "clickhouse_driver-0.2.8-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85f628b4bf6db0fe8fe13da8576a9b95c23b463dff59f4c7aa58cedf529d7d97"},
+ {file = "clickhouse_driver-0.2.8-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:036f4b3283796ca51610385c7b24bdac1bb873f8a2e97a179f66544594aa9840"},
+ {file = "clickhouse_driver-0.2.8-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2c8916d3d324ce8fd31f8dedd293dc2c29204b94785a5398d1ec1e7ea4e16a26"},
+ {file = "clickhouse_driver-0.2.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:30bee7cddd85c04ec49c753b53580364d907cc05c44daafe31b924a352e5e525"},
+ {file = "clickhouse_driver-0.2.8-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:03c8a844f6b128348d099dc5d75fad70f4e85802d1649c1b835916ac94ae750a"},
+ {file = "clickhouse_driver-0.2.8-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:33965329393fd7740b445758787ddacdf70f35fa3411f98a1a86918fff679a46"},
+ {file = "clickhouse_driver-0.2.8-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:8cf85a7ebb0a56182c5b659602e20bae6b36c48a0edf518a6e6f56042d3fcee0"},
+ {file = "clickhouse_driver-0.2.8-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:c10fd1f921ff82638cb9513b9b4acfb575b421c44ef6bf6cf57ee3c487b9d538"},
+ {file = "clickhouse_driver-0.2.8-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:0a30d49bb6c34e3f5fe42e43dd6a7da0523ddfd05834ef02bd70b9363ea7de7e"},
+ {file = "clickhouse_driver-0.2.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ea32c377a347b0801fc7f2b242f2ec7d78df58047097352672d0de5fbfa9e390"},
+ {file = "clickhouse_driver-0.2.8-cp311-cp311-win32.whl", hash = "sha256:2a85529d1c0c3f2eedf7a4f736d0efc6e6c8032ac90ca5a63f7a067db58384fe"},
+ {file = "clickhouse_driver-0.2.8-cp311-cp311-win_amd64.whl", hash = "sha256:1f438f83a7473ce7fe9c16cda8750e2fdda1b09fb87f0ec6b87a2b89acb13f24"},
+ {file = "clickhouse_driver-0.2.8-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:9b71bbef6ee08252cee0593329c8ca8e623547627807d38195331f476eaf8136"},
+ {file = "clickhouse_driver-0.2.8-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f30b3dd388f28eb4052851effe671354db55aea87de748aaf607e7048f72413e"},
+ {file = "clickhouse_driver-0.2.8-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3bb27ce7ca61089c04dc04dbf207c9165d62a85eb9c99d1451fd686b6b773f9"},
+ {file = "clickhouse_driver-0.2.8-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59c04ec0b45602b6a63e0779ca7c3d3614be4710ec5ac7214da1b157d43527c5"},
+ {file = "clickhouse_driver-0.2.8-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a61b14244993c7e0f312983455b7851576a85ab5a9fcc6374e75d2680a985e76"},
+ {file = "clickhouse_driver-0.2.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c99a1b0b7759ccd1bf44c65210543c228ba704e3153014fd3aabfe56a227b1a5"},
+ {file = "clickhouse_driver-0.2.8-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9f14d860088ab2c7eeb3782c9490ad3f6bf6b1e9235e9db9c3b0079cd4751ffa"},
+ {file = "clickhouse_driver-0.2.8-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:303887a14a71faddcdee150bc8cde498c25c446b0a72ae586bd67d0c366dbff5"},
+ {file = "clickhouse_driver-0.2.8-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:359814e4f989c138bfb83e3c81f8f88c8449721dcf32cb8cc25fdb86f4b53c99"},
+ {file = "clickhouse_driver-0.2.8-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:42de61b4cf9053698b14dbe29e1e3d78cb0a7aaef874fd854df390de5c9cc1f1"},
+ {file = "clickhouse_driver-0.2.8-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:3bf3089f220480e5a69cbec79f3b65c23afb5c2836e7285234140e5f237f2768"},
+ {file = "clickhouse_driver-0.2.8-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:41daa4ae5ada22f10c758b0b3b477a51f5df56eef8569cff8e2275de6d9b1b96"},
+ {file = "clickhouse_driver-0.2.8-cp312-cp312-win32.whl", hash = "sha256:03ea71c7167c6c38c3ba2bbed43615ce0c41ebf3bfa28d96ffcd93cd1cdd07d8"},
+ {file = "clickhouse_driver-0.2.8-cp312-cp312-win_amd64.whl", hash = "sha256:76985286e10adb2115da116ae25647319bc485ad9e327cbc27296ccf0b052180"},
+ {file = "clickhouse_driver-0.2.8-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:271529124914c439a5bbcf8a90e3101311d60c1813e03c0467e01fbabef489ee"},
+ {file = "clickhouse_driver-0.2.8-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f8f499746bc027c6d05de09efa7b2e4f2241f66c1ac2d6b7748f90709b00e10"},
+ {file = "clickhouse_driver-0.2.8-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f29f256520bb718c532e7fcd85250d4001f49acbaa9e6896bdf4a70d5557e2ef"},
+ {file = "clickhouse_driver-0.2.8-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:104d062bdf7eab74e92efcbf72088b3241365242b4f119b3fe91057c4d80825c"},
+ {file = "clickhouse_driver-0.2.8-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee34ed08592a6eff5e176f42897c6ab4dfd8c07df16e9f392e18f1f2ee3fe3ca"},
+ {file = "clickhouse_driver-0.2.8-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f5be9a8d89de881d5ea9d46f9d293caa72dbc7f40b105374cafd88f52b2099ea"},
+ {file = "clickhouse_driver-0.2.8-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c57efc768fa87e83d6778e7bbd180dd1ff5d647044983ec7d238a8577bd25fa5"},
+ {file = "clickhouse_driver-0.2.8-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:e1a003475f2d54e9fea8de86b57bc26b409c9efea3d298409ab831f194d62c3b"},
+ {file = "clickhouse_driver-0.2.8-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:fba71cf41934a23156290a70ef794a5dadc642b21cc25eb13e1f99f2512c8594"},
+ {file = "clickhouse_driver-0.2.8-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:7289b0e9d1019fed418c577963edd66770222554d1da0c491ca436593667256e"},
+ {file = "clickhouse_driver-0.2.8-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:16e810cc9be18fdada545b9a521054214dd607bb7aa2f280ca488da23a077e48"},
+ {file = "clickhouse_driver-0.2.8-cp37-cp37m-win32.whl", hash = "sha256:ed4a6590015f18f414250149255dc2ae81ae956b6e670b290d52c2ecb61ed517"},
+ {file = "clickhouse_driver-0.2.8-cp37-cp37m-win_amd64.whl", hash = "sha256:9d454f16ccf1b2185cc630f6fb2160b1abde27759c4e94c42e30b9ea911d58f0"},
+ {file = "clickhouse_driver-0.2.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2e487d49c24448873a6802c34aa21858b9e3fb4a2605268a980a5c02b54a6bae"},
+ {file = "clickhouse_driver-0.2.8-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e877de75b97ddb11a027a7499171ea0aa9cad569b18fce53c9d508353000cfae"},
+ {file = "clickhouse_driver-0.2.8-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c60dcefddf6e2c65c92b7e6096c222ff6ed73b01b6c5712f9ce8a23f2ec80f1a"},
+ {file = "clickhouse_driver-0.2.8-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:422cbbabfad3f9b533d9f517f6f4e174111a613cba878402f7ef632b0eadec3a"},
+ {file = "clickhouse_driver-0.2.8-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ff8a8e25ff6051ff3d0528dbe36305b0140075d2fa49432149ee2a7841f23ed"},
+ {file = "clickhouse_driver-0.2.8-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:19c7a5960d4f7f9a8f9a560ae05020ff5afe874b565cce06510586a0096bb626"},
+ {file = "clickhouse_driver-0.2.8-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f5b3333257b46f307b713ba507e4bf11b7531ba3765a4150924532298d645ffd"},
+ {file = "clickhouse_driver-0.2.8-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:bbc2252a697c674e1b8b6123cf205d2b15979eddf74e7ada0e62a0ecc81a75c3"},
+ {file = "clickhouse_driver-0.2.8-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:af7f1a9a99dafb0f2a91d1a2d4a3e37f86076147d59abbe69b28d39308fe20fb"},
+ {file = "clickhouse_driver-0.2.8-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:580c34cc505c492a8abeacbd863ce46158643bece914d8fe2fadea0e94c4e0c1"},
+ {file = "clickhouse_driver-0.2.8-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:5b905eaa6fd3b453299f946a2c8f4a6392f379597e51e46297c6a37699226cda"},
+ {file = "clickhouse_driver-0.2.8-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6e2b5891c52841aedf803b8054085eb8a611ad4bf57916787a1a9aabf618fb77"},
+ {file = "clickhouse_driver-0.2.8-cp38-cp38-win32.whl", hash = "sha256:b58a5612db8b3577dc2ae6fda4c783d61c2376396bb364545530aa6a767f166d"},
+ {file = "clickhouse_driver-0.2.8-cp38-cp38-win_amd64.whl", hash = "sha256:96b0424bb5dd698c10b899091562a78f4933a9a039409f310fb74db405d73854"},
+ {file = "clickhouse_driver-0.2.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:22cbed52daa584ca9a93efd772ee5c8c1f68ceaaeb21673985004ec2fd411c49"},
+ {file = "clickhouse_driver-0.2.8-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e36156fe8a355fc830cc0ea1267c804c631c9dbd9b6accdca868a426213e5929"},
+ {file = "clickhouse_driver-0.2.8-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c1341325f4180e1318d0d2cf0b268008ea250715c6f30a5ccce586860c000b5"},
+ {file = "clickhouse_driver-0.2.8-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cb52161276f7d77d4af09f1aab97a16edf86014a89e3d9923f0a6b8fdaa12438"},
+ {file = "clickhouse_driver-0.2.8-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0d1ccd47040c0a8753684a20a0f83b8a0820386889fdf460a3248e0eed142032"},
+ {file = "clickhouse_driver-0.2.8-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fcda48e938d011e5f4dcebf965e6ec19e020e8efa207b98eeb99c12fa873236d"},
+ {file = "clickhouse_driver-0.2.8-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2252ab3f8b3bbd705e1d7dc80395c7bea14f5ae51a268fc7be5328da77c0e200"},
+ {file = "clickhouse_driver-0.2.8-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e1b9ef3fa0cc6c9de77daa74a2f183186d0b5556c4f6870fc966a41fde6cae2b"},
+ {file = "clickhouse_driver-0.2.8-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d0afa3c68fed6b5e6f23eb3f053d3aba86d09dbbc7706a0120ab5595d5c37003"},
+ {file = "clickhouse_driver-0.2.8-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:102027bb687ff7a978f7110348f39f0dce450ab334787edbc64b8a9927238e32"},
+ {file = "clickhouse_driver-0.2.8-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:9fc1ae52a171ded7d9f1f971b9b5bb0ce4d0490a54e102f3717cea51011d0308"},
+ {file = "clickhouse_driver-0.2.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5a62c691be83b1da72ff3455790b50b0f894b7932ac962a8133f3f9c04c943b3"},
+ {file = "clickhouse_driver-0.2.8-cp39-cp39-win32.whl", hash = "sha256:8b5068cef07cfba5be25a9a461c010ce7a0fe2de5b0b0262c6030684f43fa7f5"},
+ {file = "clickhouse_driver-0.2.8-cp39-cp39-win_amd64.whl", hash = "sha256:cd71965d00b0f3ba992652d577b1d46b87100a67b3e0dc5c191c88092e484c81"},
+ {file = "clickhouse_driver-0.2.8-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4db0812c43f67e7b1805c05e2bc08f7d670ddfd8d8c671c9b47cdb52f4f74129"},
+ {file = "clickhouse_driver-0.2.8-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56622ffefe94a82d9a30747e3486819104d1310d7a94f0e37da461d7112e9864"},
+ {file = "clickhouse_driver-0.2.8-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c47c8ed61b2f35bb29d991f66d6e03d5cc786def56533480331b2a584854dd5"},
+ {file = "clickhouse_driver-0.2.8-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dec001a1a49b993522dd134d2fca161352f139d42edcda0e983b8ea8f5023cda"},
+ {file = "clickhouse_driver-0.2.8-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:c03bd486540a6c03aa5a164b7ec6c50980df9642ab1ce22cb70327e4090bdc60"},
+ {file = "clickhouse_driver-0.2.8-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:c059c3da454f0cc0a6f056b542a0c1784cd0398613d25326b11fd1c6f9f7e8d2"},
+ {file = "clickhouse_driver-0.2.8-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc7f9677c637b710046ec6c6c0cab25b4c4ff21620e44f462041d7455e9e8d13"},
+ {file = "clickhouse_driver-0.2.8-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba3f6b8fdd7a2e6a831ebbcaaf346f7c8c5eb5085a350c9d4d1ce7053a050b70"},
+ {file = "clickhouse_driver-0.2.8-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:20c2db3ae29950c80837d270b5ab63c74597afce226b474930060cac7969287b"},
+ {file = "clickhouse_driver-0.2.8-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:b7767019a301dad314e7b515046535a45eda84bd9c29590bc3e99b1c334f69e7"},
+ {file = "clickhouse_driver-0.2.8-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ba8b8b80fa8850546aa40acc952835b1f149af17182cdf3db4f2133b2a241fe8"},
+ {file = "clickhouse_driver-0.2.8-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:924f11e87e3dcbbc1c9e8158af9917f182cd5e96d37385485d6268f59b564142"},
+ {file = "clickhouse_driver-0.2.8-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c39e1477ad310a4d276db17c1e1cf6fb059c29eb8d21351afefd5a22de381c6"},
+ {file = "clickhouse_driver-0.2.8-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e950b9a63af5fa233e3da0e57a7ebd85d4b319e65eef5f9daac84532836f4123"},
+ {file = "clickhouse_driver-0.2.8-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:0698dc57373b2f42f3a95bd419d9fa07f2d02150f13a0db2909a2651208262b9"},
+ {file = "clickhouse_driver-0.2.8-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:e0694ca2fb459c23e44036d975fe89544a7c9918618b5d8bda9a8aa2d24e5c37"},
+ {file = "clickhouse_driver-0.2.8-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:62620348aeae5a905ccb8f7e6bff8d76aae9a95d81aa8c8f6fce0f2af7e104b8"},
+ {file = "clickhouse_driver-0.2.8-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66276fd5092cccdd6f3123df4357a068fb1972b7e2622fab6f235948c50b6eed"},
+ {file = "clickhouse_driver-0.2.8-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f86fe87327662b597824d0d7505cc600b0919473b22bbbd178a1a4d4e29283e1"},
+ {file = "clickhouse_driver-0.2.8-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:54b9c6ff0aaabdcf7e80a6d9432459611b3413d6a66bec41cbcdad7212721cc7"},
+]
+
+[package.dependencies]
+pytz = "*"
+tzlocal = "*"
+
+[package.extras]
+lz4 = ["clickhouse-cityhash (>=1.0.2.1)", "lz4", "lz4 (<=3.0.1)"]
+numpy = ["numpy (>=1.12.0)", "pandas (>=0.24.0)"]
+zstd = ["clickhouse-cityhash (>=1.0.2.1)", "zstd"]
+
+[[package]]
+name = "colorama"
+version = "0.4.6"
+description = "Cross-platform colored terminal text."
+optional = false
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
+files = [
+ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
+ {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
+]
+
+[[package]]
+name = "coverage"
+version = "7.6.1"
+description = "Code coverage measurement for Python"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "coverage-7.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b06079abebbc0e89e6163b8e8f0e16270124c154dc6e4a47b413dd538859af16"},
+ {file = "coverage-7.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cf4b19715bccd7ee27b6b120e7e9dd56037b9c0681dcc1adc9ba9db3d417fa36"},
+ {file = "coverage-7.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61c0abb4c85b095a784ef23fdd4aede7a2628478e7baba7c5e3deba61070a02"},
+ {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd21f6ae3f08b41004dfb433fa895d858f3f5979e7762d052b12aef444e29afc"},
+ {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f59d57baca39b32db42b83b2a7ba6f47ad9c394ec2076b084c3f029b7afca23"},
+ {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a1ac0ae2b8bd743b88ed0502544847c3053d7171a3cff9228af618a068ed9c34"},
+ {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e6a08c0be454c3b3beb105c0596ebdc2371fab6bb90c0c0297f4e58fd7e1012c"},
+ {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f5796e664fe802da4f57a168c85359a8fbf3eab5e55cd4e4569fbacecc903959"},
+ {file = "coverage-7.6.1-cp310-cp310-win32.whl", hash = "sha256:7bb65125fcbef8d989fa1dd0e8a060999497629ca5b0efbca209588a73356232"},
+ {file = "coverage-7.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:3115a95daa9bdba70aea750db7b96b37259a81a709223c8448fa97727d546fe0"},
+ {file = "coverage-7.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7dea0889685db8550f839fa202744652e87c60015029ce3f60e006f8c4462c93"},
+ {file = "coverage-7.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed37bd3c3b063412f7620464a9ac1314d33100329f39799255fb8d3027da50d3"},
+ {file = "coverage-7.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d85f5e9a5f8b73e2350097c3756ef7e785f55bd71205defa0bfdaf96c31616ff"},
+ {file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bc572be474cafb617672c43fe989d6e48d3c83af02ce8de73fff1c6bb3c198d"},
+ {file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c0420b573964c760df9e9e86d1a9a622d0d27f417e1a949a8a66dd7bcee7bc6"},
+ {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1f4aa8219db826ce6be7099d559f8ec311549bfc4046f7f9fe9b5cea5c581c56"},
+ {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:fc5a77d0c516700ebad189b587de289a20a78324bc54baee03dd486f0855d234"},
+ {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b48f312cca9621272ae49008c7f613337c53fadca647d6384cc129d2996d1133"},
+ {file = "coverage-7.6.1-cp311-cp311-win32.whl", hash = "sha256:1125ca0e5fd475cbbba3bb67ae20bd2c23a98fac4e32412883f9bcbaa81c314c"},
+ {file = "coverage-7.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:8ae539519c4c040c5ffd0632784e21b2f03fc1340752af711f33e5be83a9d6c6"},
+ {file = "coverage-7.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:95cae0efeb032af8458fc27d191f85d1717b1d4e49f7cb226cf526ff28179778"},
+ {file = "coverage-7.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5621a9175cf9d0b0c84c2ef2b12e9f5f5071357c4d2ea6ca1cf01814f45d2391"},
+ {file = "coverage-7.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:260933720fdcd75340e7dbe9060655aff3af1f0c5d20f46b57f262ab6c86a5e8"},
+ {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07e2ca0ad381b91350c0ed49d52699b625aab2b44b65e1b4e02fa9df0e92ad2d"},
+ {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c44fee9975f04b33331cb8eb272827111efc8930cfd582e0320613263ca849ca"},
+ {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:877abb17e6339d96bf08e7a622d05095e72b71f8afd8a9fefc82cf30ed944163"},
+ {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3e0cadcf6733c09154b461f1ca72d5416635e5e4ec4e536192180d34ec160f8a"},
+ {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3c02d12f837d9683e5ab2f3d9844dc57655b92c74e286c262e0fc54213c216d"},
+ {file = "coverage-7.6.1-cp312-cp312-win32.whl", hash = "sha256:e05882b70b87a18d937ca6768ff33cc3f72847cbc4de4491c8e73880766718e5"},
+ {file = "coverage-7.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:b5d7b556859dd85f3a541db6a4e0167b86e7273e1cdc973e5b175166bb634fdb"},
+ {file = "coverage-7.6.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a4acd025ecc06185ba2b801f2de85546e0b8ac787cf9d3b06e7e2a69f925b106"},
+ {file = "coverage-7.6.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a6d3adcf24b624a7b778533480e32434a39ad8fa30c315208f6d3e5542aeb6e9"},
+ {file = "coverage-7.6.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0c212c49b6c10e6951362f7c6df3329f04c2b1c28499563d4035d964ab8e08c"},
+ {file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e81d7a3e58882450ec4186ca59a3f20a5d4440f25b1cff6f0902ad890e6748a"},
+ {file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78b260de9790fd81e69401c2dc8b17da47c8038176a79092a89cb2b7d945d060"},
+ {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a78d169acd38300060b28d600344a803628c3fd585c912cacc9ea8790fe96862"},
+ {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2c09f4ce52cb99dd7505cd0fc8e0e37c77b87f46bc9c1eb03fe3bc9991085388"},
+ {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6878ef48d4227aace338d88c48738a4258213cd7b74fd9a3d4d7582bb1d8a155"},
+ {file = "coverage-7.6.1-cp313-cp313-win32.whl", hash = "sha256:44df346d5215a8c0e360307d46ffaabe0f5d3502c8a1cefd700b34baf31d411a"},
+ {file = "coverage-7.6.1-cp313-cp313-win_amd64.whl", hash = "sha256:8284cf8c0dd272a247bc154eb6c95548722dce90d098c17a883ed36e67cdb129"},
+ {file = "coverage-7.6.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:d3296782ca4eab572a1a4eca686d8bfb00226300dcefdf43faa25b5242ab8a3e"},
+ {file = "coverage-7.6.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:502753043567491d3ff6d08629270127e0c31d4184c4c8d98f92c26f65019962"},
+ {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a89ecca80709d4076b95f89f308544ec8f7b4727e8a547913a35f16717856cb"},
+ {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a318d68e92e80af8b00fa99609796fdbcdfef3629c77c6283566c6f02c6d6704"},
+ {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13b0a73a0896988f053e4fbb7de6d93388e6dd292b0d87ee51d106f2c11b465b"},
+ {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4421712dbfc5562150f7554f13dde997a2e932a6b5f352edcce948a815efee6f"},
+ {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:166811d20dfea725e2e4baa71fffd6c968a958577848d2131f39b60043400223"},
+ {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:225667980479a17db1048cb2bf8bfb39b8e5be8f164b8f6628b64f78a72cf9d3"},
+ {file = "coverage-7.6.1-cp313-cp313t-win32.whl", hash = "sha256:170d444ab405852903b7d04ea9ae9b98f98ab6d7e63e1115e82620807519797f"},
+ {file = "coverage-7.6.1-cp313-cp313t-win_amd64.whl", hash = "sha256:b9f222de8cded79c49bf184bdbc06630d4c58eec9459b939b4a690c82ed05657"},
+ {file = "coverage-7.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6db04803b6c7291985a761004e9060b2bca08da6d04f26a7f2294b8623a0c1a0"},
+ {file = "coverage-7.6.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f1adfc8ac319e1a348af294106bc6a8458a0f1633cc62a1446aebc30c5fa186a"},
+ {file = "coverage-7.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a95324a9de9650a729239daea117df21f4b9868ce32e63f8b650ebe6cef5595b"},
+ {file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b43c03669dc4618ec25270b06ecd3ee4fa94c7f9b3c14bae6571ca00ef98b0d3"},
+ {file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8929543a7192c13d177b770008bc4e8119f2e1f881d563fc6b6305d2d0ebe9de"},
+ {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:a09ece4a69cf399510c8ab25e0950d9cf2b42f7b3cb0374f95d2e2ff594478a6"},
+ {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:9054a0754de38d9dbd01a46621636689124d666bad1936d76c0341f7d71bf569"},
+ {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0dbde0f4aa9a16fa4d754356a8f2e36296ff4d83994b2c9d8398aa32f222f989"},
+ {file = "coverage-7.6.1-cp38-cp38-win32.whl", hash = "sha256:da511e6ad4f7323ee5702e6633085fb76c2f893aaf8ce4c51a0ba4fc07580ea7"},
+ {file = "coverage-7.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:3f1156e3e8f2872197af3840d8ad307a9dd18e615dc64d9ee41696f287c57ad8"},
+ {file = "coverage-7.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:abd5fd0db5f4dc9289408aaf34908072f805ff7792632250dcb36dc591d24255"},
+ {file = "coverage-7.6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:547f45fa1a93154bd82050a7f3cddbc1a7a4dd2a9bf5cb7d06f4ae29fe94eaf8"},
+ {file = "coverage-7.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:645786266c8f18a931b65bfcefdbf6952dd0dea98feee39bd188607a9d307ed2"},
+ {file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9e0b2df163b8ed01d515807af24f63de04bebcecbd6c3bfeff88385789fdf75a"},
+ {file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:609b06f178fe8e9f89ef676532760ec0b4deea15e9969bf754b37f7c40326dbc"},
+ {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:702855feff378050ae4f741045e19a32d57d19f3e0676d589df0575008ea5004"},
+ {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:2bdb062ea438f22d99cba0d7829c2ef0af1d768d1e4a4f528087224c90b132cb"},
+ {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9c56863d44bd1c4fe2abb8a4d6f5371d197f1ac0ebdee542f07f35895fc07f36"},
+ {file = "coverage-7.6.1-cp39-cp39-win32.whl", hash = "sha256:6e2cd258d7d927d09493c8df1ce9174ad01b381d4729a9d8d4e38670ca24774c"},
+ {file = "coverage-7.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:06a737c882bd26d0d6ee7269b20b12f14a8704807a01056c80bb881a4b2ce6ca"},
+ {file = "coverage-7.6.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:e9a6e0eb86070e8ccaedfbd9d38fec54864f3125ab95419970575b42af7541df"},
+ {file = "coverage-7.6.1.tar.gz", hash = "sha256:953510dfb7b12ab69d20135a0662397f077c59b1e6379a768e97c59d852ee51d"},
+]
+
+[package.dependencies]
+tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""}
+
+[package.extras]
+toml = ["tomli"]
+
+[[package]]
+name = "exceptiongroup"
+version = "1.2.2"
+description = "Backport of PEP 654 (exception groups)"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"},
+ {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"},
+]
+
+[package.extras]
+test = ["pytest (>=6)"]
+
+[[package]]
+name = "importlib-metadata"
+version = "8.5.0"
+description = "Read metadata from Python packages"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "importlib_metadata-8.5.0-py3-none-any.whl", hash = "sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b"},
+ {file = "importlib_metadata-8.5.0.tar.gz", hash = "sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7"},
+]
+
+[package.dependencies]
+zipp = ">=3.20"
+
+[package.extras]
+check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"]
+cover = ["pytest-cov"]
+doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
+enabler = ["pytest-enabler (>=2.2)"]
+perf = ["ipython"]
+test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"]
+type = ["pytest-mypy"]
+
+[[package]]
+name = "iniconfig"
+version = "2.0.0"
+description = "brain-dead simple config-ini parsing"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"},
+ {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"},
+]
+
+[[package]]
+name = "jinja2"
+version = "3.1.4"
+description = "A very fast and expressive template engine."
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"},
+ {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"},
+]
+
+[package.dependencies]
+MarkupSafe = ">=2.0"
+
+[package.extras]
+i18n = ["Babel (>=2.7)"]
+
+[[package]]
+name = "jmespath"
+version = "1.0.1"
+description = "JSON Matching Expressions"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"},
+ {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"},
+]
+
+[[package]]
+name = "junitxml"
+version = "0.7"
+description = "PyJUnitXML, a pyunit extension to output JUnit compatible XML."
+optional = false
+python-versions = "*"
+files = [
+ {file = "junitxml-0.7.tar.gz", hash = "sha256:a5901127067ab7f3d11df30727368c129c69b3f5595c697daf4f5ed80b1baaa3"},
+]
+
+[[package]]
+name = "markupsafe"
+version = "2.1.5"
+description = "Safely add untrusted strings to HTML/XML markup."
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"},
+ {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"},
+ {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"},
+ {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"},
+ {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"},
+ {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"},
+ {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"},
+ {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"},
+ {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"},
+ {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"},
+ {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"},
+ {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"},
+ {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"},
+ {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"},
+ {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"},
+ {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"},
+ {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"},
+ {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"},
+ {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"},
+ {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"},
+ {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"},
+ {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"},
+ {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"},
+ {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"},
+ {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"},
+ {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"},
+ {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"},
+ {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"},
+ {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"},
+ {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"},
+ {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"},
+ {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"},
+ {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"},
+ {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"},
+ {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"},
+ {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"},
+ {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"},
+ {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"},
+ {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"},
+ {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"},
+ {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"},
+ {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"},
+ {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"},
+ {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"},
+ {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"},
+ {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"},
+ {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"},
+ {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"},
+ {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"},
+ {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"},
+ {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"},
+ {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"},
+ {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"},
+ {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"},
+ {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"},
+ {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"},
+ {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"},
+ {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"},
+ {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"},
+ {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"},
+]
+
+[[package]]
+name = "packaging"
+version = "24.1"
+description = "Core utilities for Python packages"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"},
+ {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"},
+]
+
+[[package]]
+name = "parameterized"
+version = "0.9.0"
+description = "Parameterized testing with any Python test framework"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "parameterized-0.9.0-py2.py3-none-any.whl", hash = "sha256:4e0758e3d41bea3bbd05ec14fc2c24736723f243b28d702081aef438c9372b1b"},
+ {file = "parameterized-0.9.0.tar.gz", hash = "sha256:7fc905272cefa4f364c1a3429cbbe9c0f98b793988efb5bf90aac80f08db09b1"},
+]
+
+[package.extras]
+dev = ["jinja2"]
+
+[[package]]
+name = "pluggy"
+version = "1.5.0"
+description = "plugin and hook calling mechanisms for python"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"},
+ {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"},
+]
+
+[package.extras]
+dev = ["pre-commit", "tox"]
+testing = ["pytest", "pytest-benchmark"]
+
+[[package]]
+name = "pyproject-hooks"
+version = "1.2.0"
+description = "Wrappers to call pyproject.toml-based build backend hooks."
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "pyproject_hooks-1.2.0-py3-none-any.whl", hash = "sha256:9e5c6bfa8dcc30091c74b0cf803c81fdd29d94f01992a7707bc97babb1141913"},
+ {file = "pyproject_hooks-1.2.0.tar.gz", hash = "sha256:1e859bd5c40fae9448642dd871adf459e5e2084186e8d2c2a79a824c970da1f8"},
+]
+
+[[package]]
+name = "pytest"
+version = "8.3.3"
+description = "pytest: simple powerful testing with Python"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "pytest-8.3.3-py3-none-any.whl", hash = "sha256:a6853c7375b2663155079443d2e45de913a911a11d669df02a50814944db57b2"},
+ {file = "pytest-8.3.3.tar.gz", hash = "sha256:70b98107bd648308a7952b06e6ca9a50bc660be218d53c257cc1fc94fda10181"},
+]
+
+[package.dependencies]
+colorama = {version = "*", markers = "sys_platform == \"win32\""}
+exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""}
+iniconfig = "*"
+packaging = "*"
+pluggy = ">=1.5,<2"
+tomli = {version = ">=1", markers = "python_version < \"3.11\""}
+
+[package.extras]
+dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"]
+
+[[package]]
+name = "pytest-cov"
+version = "5.0.0"
+description = "Pytest plugin for measuring coverage."
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "pytest-cov-5.0.0.tar.gz", hash = "sha256:5837b58e9f6ebd335b0f8060eecce69b662415b16dc503883a02f45dfeb14857"},
+ {file = "pytest_cov-5.0.0-py3-none-any.whl", hash = "sha256:4f0764a1219df53214206bf1feea4633c3b558a2925c8b59f144f682861ce652"},
+]
+
+[package.dependencies]
+coverage = {version = ">=5.2.1", extras = ["toml"]}
+pytest = ">=4.6"
+
+[package.extras]
+testing = ["fields", "hunter", "process-tests", "pytest-xdist", "virtualenv"]
+
+[[package]]
+name = "pytest-html"
+version = "4.1.1"
+description = "pytest plugin for generating HTML reports"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "pytest_html-4.1.1-py3-none-any.whl", hash = "sha256:c8152cea03bd4e9bee6d525573b67bbc6622967b72b9628dda0ea3e2a0b5dd71"},
+ {file = "pytest_html-4.1.1.tar.gz", hash = "sha256:70a01e8ae5800f4a074b56a4cb1025c8f4f9b038bba5fe31e3c98eb996686f07"},
+]
+
+[package.dependencies]
+jinja2 = ">=3.0.0"
+pytest = ">=7.0.0"
+pytest-metadata = ">=2.0.0"
+
+[package.extras]
+docs = ["pip-tools (>=6.13.0)"]
+test = ["assertpy (>=1.1)", "beautifulsoup4 (>=4.11.1)", "black (>=22.1.0)", "flake8 (>=4.0.1)", "pre-commit (>=2.17.0)", "pytest-mock (>=3.7.0)", "pytest-rerunfailures (>=11.1.2)", "pytest-xdist (>=2.4.0)", "selenium (>=4.3.0)", "tox (>=3.24.5)"]
+
+[[package]]
+name = "pytest-metadata"
+version = "3.1.1"
+description = "pytest plugin for test session metadata"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "pytest_metadata-3.1.1-py3-none-any.whl", hash = "sha256:c8e0844db684ee1c798cfa38908d20d67d0463ecb6137c72e91f418558dd5f4b"},
+ {file = "pytest_metadata-3.1.1.tar.gz", hash = "sha256:d2a29b0355fbc03f168aa96d41ff88b1a3b44a3b02acbe491801c98a048017c8"},
+]
+
+[package.dependencies]
+pytest = ">=7.0.0"
+
+[package.extras]
+test = ["black (>=22.1.0)", "flake8 (>=4.0.1)", "pre-commit (>=2.17.0)", "tox (>=3.24.5)"]
+
+[[package]]
+name = "python-dateutil"
+version = "2.9.0.post0"
+description = "Extensions to the standard Python datetime module"
+optional = false
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
+files = [
+ {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"},
+ {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"},
+]
+
+[package.dependencies]
+six = ">=1.5"
+
+[[package]]
+name = "pytz"
+version = "2024.2"
+description = "World timezone definitions, modern and historical"
+optional = false
+python-versions = "*"
+files = [
+ {file = "pytz-2024.2-py2.py3-none-any.whl", hash = "sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725"},
+ {file = "pytz-2024.2.tar.gz", hash = "sha256:2aa355083c50a0f93fa581709deac0c9ad65cca8a9e9beac660adcbd493c798a"},
+]
+
+[[package]]
+name = "ruff"
+version = "0.5.1"
+description = "An extremely fast Python linter and code formatter, written in Rust."
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "ruff-0.5.1-py3-none-linux_armv6l.whl", hash = "sha256:6ecf968fcf94d942d42b700af18ede94b07521bd188aaf2cd7bc898dd8cb63b6"},
+ {file = "ruff-0.5.1-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:204fb0a472f00f2e6280a7c8c7c066e11e20e23a37557d63045bf27a616ba61c"},
+ {file = "ruff-0.5.1-py3-none-macosx_11_0_arm64.whl", hash = "sha256:d235968460e8758d1e1297e1de59a38d94102f60cafb4d5382033c324404ee9d"},
+ {file = "ruff-0.5.1-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38beace10b8d5f9b6bdc91619310af6d63dd2019f3fb2d17a2da26360d7962fa"},
+ {file = "ruff-0.5.1-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5e478d2f09cf06add143cf8c4540ef77b6599191e0c50ed976582f06e588c994"},
+ {file = "ruff-0.5.1-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f0368d765eec8247b8550251c49ebb20554cc4e812f383ff9f5bf0d5d94190b0"},
+ {file = "ruff-0.5.1-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:3a9a9a1b582e37669b0138b7c1d9d60b9edac880b80eb2baba6d0e566bdeca4d"},
+ {file = "ruff-0.5.1-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bdd9f723e16003623423affabcc0a807a66552ee6a29f90eddad87a40c750b78"},
+ {file = "ruff-0.5.1-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:be9fd62c1e99539da05fcdc1e90d20f74aec1b7a1613463ed77870057cd6bd96"},
+ {file = "ruff-0.5.1-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e216fc75a80ea1fbd96af94a6233d90190d5b65cc3d5dfacf2bd48c3e067d3e1"},
+ {file = "ruff-0.5.1-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:c4c2112e9883a40967827d5c24803525145e7dab315497fae149764979ac7929"},
+ {file = "ruff-0.5.1-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:dfaf11c8a116394da3b65cd4b36de30d8552fa45b8119b9ef5ca6638ab964fa3"},
+ {file = "ruff-0.5.1-py3-none-musllinux_1_2_i686.whl", hash = "sha256:d7ceb9b2fe700ee09a0c6b192c5ef03c56eb82a0514218d8ff700f6ade004108"},
+ {file = "ruff-0.5.1-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:bac6288e82f6296f82ed5285f597713acb2a6ae26618ffc6b429c597b392535c"},
+ {file = "ruff-0.5.1-py3-none-win32.whl", hash = "sha256:5c441d9c24ec09e1cb190a04535c5379b36b73c4bc20aa180c54812c27d1cca4"},
+ {file = "ruff-0.5.1-py3-none-win_amd64.whl", hash = "sha256:b1789bf2cd3d1b5a7d38397cac1398ddf3ad7f73f4de01b1e913e2abc7dfc51d"},
+ {file = "ruff-0.5.1-py3-none-win_arm64.whl", hash = "sha256:2875b7596a740cbbd492f32d24be73e545a4ce0a3daf51e4f4e609962bfd3cd2"},
+ {file = "ruff-0.5.1.tar.gz", hash = "sha256:3164488aebd89b1745b47fd00604fb4358d774465f20d1fcd907f9c0fc1b0655"},
+]
+
+[[package]]
+name = "s3transfer"
+version = "0.10.3"
+description = "An Amazon S3 Transfer Manager"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "s3transfer-0.10.3-py3-none-any.whl", hash = "sha256:263ed587a5803c6c708d3ce44dc4dfedaab4c1a32e8329bab818933d79ddcf5d"},
+ {file = "s3transfer-0.10.3.tar.gz", hash = "sha256:4f50ed74ab84d474ce614475e0b8d5047ff080810aac5d01ea25231cfc944b0c"},
+]
+
+[package.dependencies]
+botocore = ">=1.33.2,<2.0a.0"
+
+[package.extras]
+crt = ["botocore[crt] (>=1.33.2,<2.0a.0)"]
+
+[[package]]
+name = "six"
+version = "1.16.0"
+description = "Python 2 and 3 compatibility utilities"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
+files = [
+ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
+ {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
+]
+
+[[package]]
+name = "tomli"
+version = "2.0.2"
+description = "A lil' TOML parser"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "tomli-2.0.2-py3-none-any.whl", hash = "sha256:2ebe24485c53d303f690b0ec092806a085f07af5a5aa1464f3931eec36caaa38"},
+ {file = "tomli-2.0.2.tar.gz", hash = "sha256:d46d457a85337051c36524bc5349dd91b1877838e2979ac5ced3e710ed8a60ed"},
+]
+
+[[package]]
+name = "tzdata"
+version = "2024.2"
+description = "Provider of IANA time zone data"
+optional = false
+python-versions = ">=2"
+files = [
+ {file = "tzdata-2024.2-py2.py3-none-any.whl", hash = "sha256:a48093786cdcde33cad18c2555e8532f34422074448fbc874186f0abd79565cd"},
+ {file = "tzdata-2024.2.tar.gz", hash = "sha256:7d85cc416e9382e69095b7bdf4afd9e3880418a2413feec7069d533d6b4e31cc"},
+]
+
+[[package]]
+name = "tzlocal"
+version = "5.2"
+description = "tzinfo object for the local timezone"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "tzlocal-5.2-py3-none-any.whl", hash = "sha256:49816ef2fe65ea8ac19d19aa7a1ae0551c834303d5014c6d5a62e4cbda8047b8"},
+ {file = "tzlocal-5.2.tar.gz", hash = "sha256:8d399205578f1a9342816409cc1e46a93ebd5755e39ea2d85334bea911bf0e6e"},
+]
+
+[package.dependencies]
+"backports.zoneinfo" = {version = "*", markers = "python_version < \"3.9\""}
+tzdata = {version = "*", markers = "platform_system == \"Windows\""}
+
+[package.extras]
+devenv = ["check-manifest", "pytest (>=4.3)", "pytest-cov", "pytest-mock (>=3.3)", "zest.releaser"]
+
+[[package]]
+name = "urllib3"
+version = "1.26.20"
+description = "HTTP library with thread-safe connection pooling, file post, and more."
+optional = false
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7"
+files = [
+ {file = "urllib3-1.26.20-py2.py3-none-any.whl", hash = "sha256:0ed14ccfbf1c30a9072c7ca157e4319b70d65f623e91e7b32fadb2853431016e"},
+ {file = "urllib3-1.26.20.tar.gz", hash = "sha256:40c2dc0c681e47eb8f90e7e27bf6ff7df2e677421fd46756da1161c39ca70d32"},
+]
+
+[package.extras]
+brotli = ["brotli (==1.0.9)", "brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"]
+secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"]
+socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]
+
+[[package]]
+name = "urllib3"
+version = "2.2.3"
+description = "HTTP library with thread-safe connection pooling, file post, and more."
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"},
+ {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"},
+]
+
+[package.extras]
+brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"]
+h2 = ["h2 (>=4,<5)"]
+socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"]
+zstd = ["zstandard (>=0.18.0)"]
+
+[[package]]
+name = "zipp"
+version = "3.20.2"
+description = "Backport of pathlib-compatible object wrapper for zip files"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "zipp-3.20.2-py3-none-any.whl", hash = "sha256:a817ac80d6cf4b23bf7f2828b7cabf326f15a001bea8b1f9b49631780ba28350"},
+ {file = "zipp-3.20.2.tar.gz", hash = "sha256:bc9eb26f4506fda01b81bcde0ca78103b6e62f991b381fec825435c836edbc29"},
+]
+
+[package.extras]
+check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"]
+cover = ["pytest-cov"]
+doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
+enabler = ["pytest-enabler (>=2.2)"]
+test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"]
+type = ["pytest-mypy"]
+
+[metadata]
+lock-version = "2.0"
+python-versions = ">=3.8 <3.13"
+content-hash = "57e4df8645206412e027b20f317c8a109611886821c336c372af84b9805669fa"
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 0000000..d1940d9
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,36 @@
+[build-system]
+requires = ["poetry-core==1.9.1"]
+build-backend = "poetry.core.masonry.api"
+
+[tool.poetry]
+name = "ripley"
+# 0.dev.0 is replaced during CI / CD. See: .github/workflows build job
+version = "0.dev.0"
+description = "data / database manipulation tool. See: https://github.com/d-ganchar/ripley"
+authors = ["Danila Ganchar"]
+license = "MIT"
+readme = "README.md"
+keywords = ['data', 'database', 'sql', 'clickhouse']
+
+classifiers = [
+ 'Development Status :: 3 - Alpha',
+ 'Topic :: Database',
+ 'Programming Language :: SQL',
+ 'Programming Language :: Python :: 3.8',
+ 'Programming Language :: Python :: 3.9',
+ 'Programming Language :: Python :: 3.10',
+ 'Programming Language :: Python :: 3.11',
+ 'Programming Language :: Python :: 3.12',
+]
+
+[tool.poetry.dependencies]
+python = ">=3.8 <3.13"
+
+
+[tool.poetry.group.dev.dependencies]
+pytest = "^8.3.3"
+parameterized = "^0.9.0"
+ruff = "0.5.1"
+clickhouse-driver = "0.2.8"
+build = "1.2.2"
+boto3 = "^1.35.43"
diff --git a/ripley/__init__.py b/ripley/__init__.py
new file mode 100755
index 0000000..4168448
--- /dev/null
+++ b/ripley/__init__.py
@@ -0,0 +1,8 @@
+from typing import Any
+
+from ._clickhouse.main_service import MainService as _MainClickhouse
+from ._protocols.clickhouse import ClickhouseProtocol
+
+
+def from_clickhouse(client: Any) -> ClickhouseProtocol:
+ return _MainClickhouse(client)
diff --git a/ripley/_base_model.py b/ripley/_base_model.py
new file mode 100755
index 0000000..9373e36
--- /dev/null
+++ b/ripley/_base_model.py
@@ -0,0 +1,10 @@
+from dataclasses import dataclass
+from typing import Any
+
+
+@dataclass
+class BaseModel:
+ def __eq__(self, __value: Any):
+ return isinstance(__value, self.__class__) and self.__dict__ == __value.__dict__
+
+
diff --git a/ripley/_clickhouse/__init__.py b/ripley/_clickhouse/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/ripley/_clickhouse/cmd_service.py b/ripley/_clickhouse/cmd_service.py
new file mode 100644
index 0000000..a1056e2
--- /dev/null
+++ b/ripley/_clickhouse/cmd_service.py
@@ -0,0 +1,76 @@
+import re
+from copy import deepcopy
+from typing import Any, Type, List, Dict
+
+from .._log import log
+from .._sql_cmd.clickhouse import AlterOnClusterCmd, CreateTableOnClusterCmd, TruncateOnClusterCmd, CreateDbOnCluster
+from .._sql_cmd.general import AbstractSql
+
+
+class CmdService:
+ def __init__(self, client: Any) -> None:
+ self._client = client
+ self._settings = {}
+ self._on_cluster = ''
+
+ @property
+ def settings(self) -> dict:
+ return self._settings
+
+ @property
+ def active_db(self) -> str:
+ return self._client.get_connection().database
+
+ @property
+ def on_cluster(self) -> str:
+ return self._on_cluster
+
+ def get_db_or_default(self, db: str = '') -> str:
+ return db if db else self.active_db
+
+ def get_full_table_name(self, table: str, db: str = '') -> str:
+ return f'{self.get_db_or_default(db)}.{table}'
+
+ def skip_on_cluster(self):
+ self._on_cluster = ''
+
+ def set_on_cluster(self, name: str):
+ self._on_cluster = name
+
+ def set_settings(self, settings: dict):
+ self._settings = settings
+
+ def skip_settings(self):
+ self._settings = {}
+
+ def exec(self, sql: str, params: dict = None, with_column_types: bool = False):
+ return self._client.execute(sql, params=params, with_column_types=with_column_types, settings=self._settings)
+
+ def get_records(self, sql: str, model: Type = None, params: dict = None) -> List[Any]:
+ data, columns = self.exec(sql, params, True)
+ columns = [re.sub(r'\W', '_', name) for name, type_ in columns]
+ records = []
+
+ for rec in data:
+ params = {columns[ix_]: value_ for ix_, value_ in enumerate(rec)}
+ records.append(model(**params) if model else params)
+
+ return records
+
+ def get_first_record(self, sql: str, model: Type = None, params: dict = None) -> Any:
+ records = self.get_records(sql, model, params)
+ if records:
+ return records[0]
+
+ def run_cmd(self, model_class: Type[AbstractSql], model_params: Dict) -> Any:
+ params = deepcopy(model_params)
+ if issubclass(
+ model_class,
+ (AlterOnClusterCmd, CreateTableOnClusterCmd, TruncateOnClusterCmd, CreateDbOnCluster)
+ ):
+ if self._on_cluster:
+ params['on_cluster'] = self._on_cluster
+
+ cmd = model_class(**params)
+ log.info('%s\nSETTINGS %s', cmd, self._settings)
+        return self.exec(cmd.to_sql())
diff --git a/ripley/_clickhouse/db_service.py b/ripley/_clickhouse/db_service.py
new file mode 100644
index 0000000..1b03de5
--- /dev/null
+++ b/ripley/_clickhouse/db_service.py
@@ -0,0 +1,21 @@
+from typing import Any
+
+from .cmd_service import CmdService
+from .system_service import SystemService
+from .._sql_cmd.clickhouse import CreateDbOnCluster
+from ..clickhouse_models.db import ClickhouseDbModel
+
+
+class DbService:
+ def __init__(self, client: Any, system: SystemService, cmd: CmdService) -> None:
+ self._client = client
+ self._cmd = cmd
+ self._system = system
+
+ def create_db(self, name: str, engine: str = '') -> ClickhouseDbModel:
+ self._cmd.run_cmd(
+ CreateDbOnCluster,
+ model_params=dict(name=name, engine=engine),
+ )
+
+ return self._system.get_database_by_name(name)
diff --git a/ripley/_clickhouse/main_service.py b/ripley/_clickhouse/main_service.py
new file mode 100644
index 0000000..24609e8
--- /dev/null
+++ b/ripley/_clickhouse/main_service.py
@@ -0,0 +1,119 @@
+from typing import Any, List
+
+from .cmd_service import CmdService
+from .db_service import DbService
+from .partition_service import PartitionService
+from .system_service import SystemService
+from .table_service import TableService
+from .._protocols.clickhouse import ClickhouseProtocol
+from ..clickhouse_models.column import ClickhouseColumnModel as Column
+from ..clickhouse_models.db import ClickhouseDbModel as Db
+from ..clickhouse_models.disk import ClickhouseDiskModel as Disk
+from ..clickhouse_models.partition import ClickhousePartitionModel as Partition
+from ..clickhouse_models.process import ClickhouseProcessModel as Process
+from ..clickhouse_models.s3_settings import ClickhouseS3SettingsModel as S3Settings
+from ..clickhouse_models.table import ClickhouseTableModel as Table
+
+
+class MainService(ClickhouseProtocol):
+ def __init__(self, client: Any) -> None:
+ self._client = client
+ self._cmd = CmdService(client)
+ self._system = SystemService(self._cmd)
+ self._partition = PartitionService(client, self._system, self._cmd)
+ self._table = TableService(client, self._system, self._cmd)
+ self._db = DbService(client, self._system, self._cmd)
+
+ def ping(self) -> bool:
+ return self._client.get_connection().ping()
+
+ @property
+ def active_db(self) -> str:
+ return self._cmd.active_db
+
+ @property
+ def on_cluster(self) -> str:
+ return self._cmd.on_cluster
+
+ @property
+ def settings(self) -> dict:
+ return self._cmd.settings
+
+ def set_settings(self, settings: dict):
+ self._cmd.set_settings(settings)
+
+ def skip_settings(self):
+ self._cmd.skip_settings()
+
+ def set_on_cluster(self, name: str):
+ self._cmd.set_on_cluster(name)
+
+ def skip_on_cluster(self):
+ self._cmd.skip_on_cluster()
+
+ def create_db(self, name: str, engine: str = '') -> Db:
+ return self._db.create_db(name, engine)
+
+ def exec(self, sql: str, params: dict = None) -> List:
+ return self._cmd.exec(sql, params)
+
+ def move_partition(self, from_table: Table, to_table: Table, partition: str) -> None:
+ self._partition.move_partition(from_table, to_table, partition)
+
+ def replace_partition(self, from_table: Table, to_table: Table, partition: str) -> None:
+ self._partition.replace_partition(from_table, to_table, partition)
+
+ def drop_partition(self, table: Table, partition: str) -> None:
+ self._partition.drop_partition(table, partition)
+
+ def detach_partition(self, table: Table, partition: str) -> None:
+ self._partition.detach_partition(table, partition)
+
+ def attach_partition(self, table: Table, partition: str) -> None:
+ self._partition.attach_partition(table, partition)
+
+ def get_databases(self) -> List[Db]:
+ return self._system.get_databases()
+
+ def get_database_by_name(self, name: str = '') -> Db:
+ return self._system.get_database_by_name(name)
+
+ def get_tables_by_db(self, db: str = '') -> List[Table]:
+ return self._system.get_tables_by_db(db)
+
+ def get_table_by_name(self, table: str, db: str = '') -> Table:
+ return self._system.get_table_by_name(table, db)
+
+ def get_table_partitions(self, table: str, db: str = '') -> List[Partition]:
+ return self._system.get_table_partitions(table, db)
+
+ def get_processes(self) -> List[Process]:
+ return self._system.get_processes()
+
+ def get_process_by_query_id(self, query_id: str) -> Process:
+ return self._system.get_process_by_query_id(query_id)
+
+ def get_disks(self) -> List[Disk]:
+ return self._system.get_disks()
+
+ def get_table_columns(self, table: str, db: str = '') -> List[Column]:
+ return self._system.get_table_columns(table, db)
+
+ def create_table_as(self, from_table: Table, table: str, db: str = '', order_by: list = None,
+ partition_by: list = None, engine: str = '') -> Table:
+ return self._table.create_table_as(from_table, table, db, order_by, partition_by, engine)
+
+ def insert_from_table(self, from_table: Table, to_table: Table) -> None:
+ self._table.insert_from_table(from_table, to_table)
+
+ def truncate(self, table: str, db: str = '') -> None:
+ self._table.truncate(table, db)
+
+ def insert_from_s3(self, table: Table, s3_settings: S3Settings):
+ self._table.insert_from_s3(table, s3_settings)
+
+ def insert_table_to_s3(self, table: Table, s3_settings: S3Settings):
+ self._table.insert_table_to_s3(table, s3_settings)
+
+ def rename_table(self, table: Table, new_name: str, db: str = '') -> None:
+ self._table.rename_table(table, new_name, db)
diff --git a/ripley/_clickhouse/partition_service.py b/ripley/_clickhouse/partition_service.py
new file mode 100644
index 0000000..176d171
--- /dev/null
+++ b/ripley/_clickhouse/partition_service.py
@@ -0,0 +1,66 @@
+from typing import Any
+
+from .cmd_service import CmdService
+from .system_service import SystemService
+from .._sql_cmd.clickhouse import (
+ DetachPartitionOnClusterCmd,
+ AttachPartitionOnClusterCmd,
+ DropPartitionOnClusterCmd,
+ MovePartitionOnClusterCmd,
+ ReplacePartitionOnClusterCmd,
+)
+from ..clickhouse_models.table import ClickhouseTableModel as CTable
+
+
+class PartitionService:
+ def __init__(self, client: Any, system: SystemService, cmd: CmdService) -> None:
+ self._client = client
+ self._system = system
+ self._cmd = cmd
+
+ def move_partition(self, from_table: CTable, to_table: CTable, partition: str) -> None:
+ self._cmd.run_cmd(
+ MovePartitionOnClusterCmd,
+ model_params=dict(
+ to_table_name=to_table.full_name,
+ table_name=from_table.full_name,
+ partition=partition,
+ ),
+ )
+
+ def drop_partition(self, table: CTable, partition: str) -> None:
+ self._cmd.run_cmd(
+ DropPartitionOnClusterCmd,
+ model_params=dict(
+ table_name=table.full_name,
+ partition=partition,
+ ),
+ )
+
+ def replace_partition(self, from_table: CTable, to_table: CTable, partition: str) -> None:
+ self._cmd.run_cmd(
+ ReplacePartitionOnClusterCmd,
+ model_params=dict(
+ table_name=to_table.full_name,
+ partition=partition,
+ from_table_name=from_table.full_name,
+ ),
+ )
+
+ def detach_partition(self, table: CTable, partition: str) -> None:
+ self._cmd.run_cmd(
+ DetachPartitionOnClusterCmd,
+ model_params=dict(
+ table_name=table.full_name,
+ partition=partition,
+ ),
+ )
+
+ def attach_partition(self, table: CTable, partition: str) -> None:
+ self._cmd.run_cmd(
+ AttachPartitionOnClusterCmd,
+ model_params=dict(
+ table_name=table.full_name,
+ partition=partition,
+ ),
+ )
diff --git a/ripley/_clickhouse/system_service.py b/ripley/_clickhouse/system_service.py
new file mode 100755
index 0000000..a0de37d
--- /dev/null
+++ b/ripley/_clickhouse/system_service.py
@@ -0,0 +1,90 @@
+from typing import List
+
+from .cmd_service import CmdService
+from ..clickhouse_models.column import ClickhouseColumnModel
+from ..clickhouse_models.db import ClickhouseDbModel
+from ..clickhouse_models.disk import ClickhouseDiskModel
+from ..clickhouse_models.partition import ClickhousePartitionModel
+from ..clickhouse_models.process import ClickhouseProcessModel
+from ..clickhouse_models.table import ClickhouseTableModel
+
+
+class SystemService:
+ def __init__(self, cmd: CmdService):
+ self._cmd = cmd
+
+ def get_databases(self) -> List[ClickhouseDbModel]:
+ return self._cmd.get_records("""
+ SELECT *
+ FROM system.databases
+ WHERE lower(name) != 'information_schema' AND name != 'system'
+ ORDER BY name
+ """, model=ClickhouseDbModel)
+
+ def get_database_by_name(self, name: str = '') -> ClickhouseDbModel:
+ return self._cmd.get_first_record("""
+ SELECT *
+ FROM system.databases
+            WHERE name = %(database)s
+ AND lower(name) != 'information_schema' AND name != 'system'
+ LIMIT 1
+ """, params={'database': self._cmd.get_db_or_default(name)}, model=ClickhouseDbModel)
+
+ def get_table_by_name(self, table: str, db: str = '') -> 'ClickhouseTableModel':
+ return self._cmd.get_first_record("""
+ SELECT *
+ FROM system.tables
+ WHERE database = %(database)s AND name = %(table)s
+            AND lower(database) != 'information_schema' AND database != 'system'
+ LIMIT 1
+ """, params={'database': self._cmd.get_db_or_default(db), 'table': table}, model=ClickhouseTableModel)
+
+ def get_tables_by_db(self, db: str = '') -> List[ClickhouseTableModel]:
+ return self._cmd.get_records("""
+ SELECT *
+ FROM system.tables
+ WHERE database = %(database)s
+            AND lower(database) != 'information_schema' AND database != 'system'
+ """, params={'database': self._cmd.get_db_or_default(db)}, model=ClickhouseTableModel)
+
+ def get_table_partitions(self, table: str, db: str = '') -> List[ClickhousePartitionModel]:
+ return self._cmd.get_records("""
+ SELECT partition,
+ partition_id,
+ active,
+ database,
+ table,
+ visible,
+ sum(rows) AS rows,
+ sum(bytes_on_disk) AS bytes_on_disk,
+ sum(data_compressed_bytes) AS data_compressed_bytes,
+ sum(data_uncompressed_bytes) AS data_uncompressed_bytes
+ FROM system.parts
+ WHERE database = %(database)s AND table = %(table)s
+            AND lower(database) != 'information_schema' AND database != 'system'
+ GROUP BY database, table, partition_id, partition, active, visible
+ ORDER BY partition
+ """, params={'database': self._cmd.get_db_or_default(db), 'table': table}, model=ClickhousePartitionModel)
+
+ def get_processes(self) -> List[ClickhouseProcessModel]:
+ return self._cmd.get_records('SELECT * FROM system.processes', model=ClickhouseProcessModel)
+
+ def get_process_by_query_id(self, query_id: str) -> ClickhouseProcessModel:
+ return self._cmd.get_first_record("""
+ SELECT *
+ FROM system.processes
+ WHERE query_id = %(query_id)s
+ LIMIT 1
+ """, params={'query_id': query_id}, model=ClickhouseProcessModel)
+
+ def get_disks(self) -> List[ClickhouseDiskModel]:
+ return self._cmd.get_records('SELECT * FROM system.disks', model=ClickhouseDiskModel)
+
+ def get_table_columns(self, table: str, db: str = '') -> List[ClickhouseColumnModel]:
+ return self._cmd.get_records("""
+ SELECT *
+ FROM system.columns
+ WHERE table = %(table)s AND database = %(database)s
+            AND lower(database) != 'information_schema' AND database != 'system'
+ ORDER BY position
+ """, params={'table': table, 'database': self._cmd.get_db_or_default(db)}, model=ClickhouseColumnModel)
diff --git a/ripley/_clickhouse/table_service.py b/ripley/_clickhouse/table_service.py
new file mode 100644
index 0000000..de45f51
--- /dev/null
+++ b/ripley/_clickhouse/table_service.py
@@ -0,0 +1,86 @@
+from typing import Any
+
+from .cmd_service import CmdService
+from .system_service import SystemService
+from .._sql_cmd.clickhouse import (
+ RenameTableOnCluster,
+ TruncateOnClusterCmd,
+ CreateTableAsOnClusterCmd,
+ InsertFromS3Cmd,
+ InsertIntoS3Cmd,
+ ClickhouseS3SettingsModel,
+)
+from .._sql_cmd.general import BaseInsertIntoTableFromTable
+from ..clickhouse_models.table import ClickhouseTableModel
+
+
+class TableService:
+ def __init__(self, client: Any, system: SystemService, cmd: CmdService) -> None:
+ self._client = client
+ self._cmd = cmd
+ self._system = system
+
+ def create_table_as(
+ self,
+ from_table: ClickhouseTableModel,
+ table: str,
+ db: str = '',
+ order_by: list = None,
+ partition_by: list = None,
+ engine: str = ''
+ ) -> ClickhouseTableModel:
+ table_full_name = self._cmd.get_full_table_name(table, db)
+ order = f'ORDER BY {", ".join(order_by) if order_by else from_table.sorting_key}'
+ partition = ", ".join(partition_by) if partition_by else from_table.partition_key
+ if partition:
+ partition = f'PARTITION BY {partition}'
+
+ self._cmd.run_cmd(
+ CreateTableAsOnClusterCmd,
+ model_params=dict(
+ table_name=table_full_name,
+ from_table=from_table.full_name,
+ order_by=order,
+ partition_by=partition,
+ engine=engine if engine else from_table.engine,
+ ),
+ )
+
+ return self._system.get_table_by_name(table, db)
+
+ def insert_from_table(self, from_table: ClickhouseTableModel, to_table: ClickhouseTableModel) -> None:
+ self._cmd.run_cmd(
+ BaseInsertIntoTableFromTable,
+ model_params=dict(from_table=from_table.full_name, to_table=to_table.full_name),
+ )
+
+ def truncate(self, table: str, db: str = '') -> None:
+ table_name = self._cmd.get_full_table_name(table, db)
+ self._cmd.run_cmd(
+ TruncateOnClusterCmd,
+ model_params=dict(table_name=table_name),
+ )
+
+ def insert_from_s3(self, table: ClickhouseTableModel, s3_settings: ClickhouseS3SettingsModel):
+ self._cmd.run_cmd(
+ InsertFromS3Cmd,
+ model_params=dict(
+ table_name=table.full_name,
+ s3_settings=s3_settings,
+ ),
+ )
+
+ def insert_table_to_s3(self, table: ClickhouseTableModel, s3_settings: ClickhouseS3SettingsModel):
+ self._cmd.run_cmd(
+ InsertIntoS3Cmd,
+ model_params=dict(table_name=table.full_name, s3_settings=s3_settings),
+ )
+
+ def rename_table(self, table: ClickhouseTableModel, new_name: str, db: str = '') -> ClickhouseTableModel:
+ full_name = self._cmd.get_full_table_name(new_name, db)
+ self._cmd.run_cmd(
+ RenameTableOnCluster,
+ model_params=dict(table=table.full_name, new_name=full_name),
+ )
+
+ return self._system.get_table_by_name(new_name, db)
diff --git a/ripley/_log.py b/ripley/_log.py
new file mode 100644
index 0000000..758d84d
--- /dev/null
+++ b/ripley/_log.py
@@ -0,0 +1,3 @@
+import logging
+
+log = logging.getLogger('ripley')
diff --git a/ripley/_protocols/__init__.py b/ripley/_protocols/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/ripley/_protocols/clickhouse.py b/ripley/_protocols/clickhouse.py
new file mode 100644
index 0000000..90c604b
--- /dev/null
+++ b/ripley/_protocols/clickhouse.py
@@ -0,0 +1,213 @@
+import abc
+from typing import Protocol, List
+
+from ..clickhouse_models.column import ClickhouseColumnModel as Column
+from ..clickhouse_models.db import ClickhouseDbModel as Db
+from ..clickhouse_models.disk import ClickhouseDiskModel as Disk
+from ..clickhouse_models.partition import ClickhousePartitionModel as Partition
+from ..clickhouse_models.process import ClickhouseProcessModel as Process
+from ..clickhouse_models.s3_settings import ClickhouseS3SettingsModel as S3Settings
+from ..clickhouse_models.table import ClickhouseTableModel as Table
+
+
+class ClickhouseProtocol(Protocol, metaclass=abc.ABCMeta):
+ @property
+ def active_db(self) -> str:
+ """
+ name of active database
+ """
+
+ @property
+ def on_cluster(self) -> str:
+ """
+ name of ON CLUSTER mode
+ """
+
+ @property
+ def settings(self) -> dict:
+ """
+ see: https://clickhouse.com/docs/en/operations/settings/settings
+ """
+
+ @abc.abstractmethod
+ def set_settings(self, settings: dict):
+ """
+ see: https://clickhouse.com/docs/en/operations/settings/settings
+ """
+
+ @abc.abstractmethod
+ def skip_settings(self):
+ """
+ see: https://clickhouse.com/docs/en/operations/settings/settings
+ """
+
+ @abc.abstractmethod
+ def set_on_cluster(self, name: str):
+ """
+ enable ON CLUSTER mode
+ """
+
+ @abc.abstractmethod
+ def skip_on_cluster(self):
+ """
+ disable ON CLUSTER mode
+ """
+
+ @abc.abstractmethod
+ def create_db(self, name: str, engine: str = '') -> Db:
+ """
+ see: https://clickhouse.com/docs/en/sql-reference/statements/create/database
+ """
+
+ @abc.abstractmethod
+ def ping(self) -> bool:
+ pass
+
+ @abc.abstractmethod
+ def exec(self, sql: str, params: dict = None) -> List:
+ pass
+
+ @abc.abstractmethod
+ def move_partition(self, from_table: Table, to_table: Table, partition: str) -> None:
+ """
+ see: https://clickhouse.com/docs/en/sql-reference/statements/alter/partition#move-partition-to-table
+ """
+
+ @abc.abstractmethod
+ def replace_partition(self, from_table: Table, to_table: Table, partition: str) -> None:
+ """
+ see: https://clickhouse.com/docs/en/sql-reference/statements/alter/partition#replace-partition
+ """
+
+ @abc.abstractmethod
+ def drop_partition(self, table: Table, partition: str) -> None:
+ """
+ see: https://clickhouse.com/docs/en/sql-reference/statements/alter/partition#drop-partitionpart
+ """
+
+ @abc.abstractmethod
+ def detach_partition(self, table: Table, partition: str) -> None:
+ """
+ see: https://clickhouse.com/docs/en/sql-reference/statements/alter/partition#detach-partitionpart
+ """
+
+ @abc.abstractmethod
+ def attach_partition(self, table: Table, partition: str) -> None:
+ """
+ see: https://clickhouse.com/docs/en/sql-reference/statements/alter/partition#attach-partitionpart
+ """
+
+ @abc.abstractmethod
+ def get_databases(self) -> List[Db]:
+ """
+ system.databases
+ see: https://clickhouse.com/docs/en/operations/system-tables/databases
+ """
+
+ @abc.abstractmethod
+ def get_database_by_name(self, name: str = '') -> Db:
+ """
+ system.databases
+ see: https://clickhouse.com/docs/en/operations/system-tables/databases
+ """
+
+ @abc.abstractmethod
+ def get_tables_by_db(self, db: str = '') -> List[Table]:
+ """
+ system.tables
+ see: https://clickhouse.com/docs/en/operations/system-tables/tables
+ """
+
+ @abc.abstractmethod
+ def get_table_by_name(self, table: str, db: str = '') -> Table:
+ """
+ system.tables
+ see: https://clickhouse.com/docs/en/operations/system-tables/tables
+ """
+
+ @abc.abstractmethod
+ def get_table_partitions(self, table: str, db: str = '') -> List[Partition]:
+ """
+ system.parts
+ see: https://clickhouse.com/docs/en/operations/system-tables/parts
+ """
+
+ @abc.abstractmethod
+ def get_processes(self) -> List[Process]:
+ """
+ system.processes
+ see: https://clickhouse.com/docs/en/operations/system-tables/processes
+ """
+
+ @abc.abstractmethod
+ def get_process_by_query_id(self, query_id: str) -> Process:
+ """
+ system.processes
+ see: https://clickhouse.com/docs/en/operations/system-tables/processes
+ """
+
+ @abc.abstractmethod
+ def get_disks(self) -> List[Disk]:
+ """
+ system.disks
+ see: https://clickhouse.com/docs/en/operations/system-tables/disks
+ """
+
+ @abc.abstractmethod
+ def get_table_columns(self, table: str, db: str = '') -> List[Column]:
+ """
+ system.columns
+ https://clickhouse.com/docs/en/operations/system-tables/columns
+ """
+
+ @abc.abstractmethod
+ def create_table_as(
+ self,
+ from_table: Table,
+ table: str,
+ db: str = '',
+ order_by: list = None,
+ partition_by: list = None,
+ engine: str = '',
+ ) -> Table:
+ """
+ Creates a table with the same structure or with custom ORDER BY / PARTITION BY / Engine
+ Query result:
+ CREATE TABLE {db}.{table}
+ ENGINE = {engine}
+ ORDER BY {order_by}
+ PARTITION BY {partition_by}
+ AS {from_table}
+ """
+
+ @abc.abstractmethod
+ def insert_from_table(self, from_table: Table, to_table: Table) -> None:
+ """
+ INSERT INTO db1.table1 SELECT * FROM db2.table2
+ """
+
+ @abc.abstractmethod
+ def truncate(self, table: str, db: str = '') -> None:
+ pass
+
+ @abc.abstractmethod
+ def insert_from_s3(self, table: Table, s3_settings: S3Settings):
+ """
+ INSERT INTO db1.table1 SELECT * FROM s3(...)
+
+ see: https://clickhouse.com/docs/en/integrations/s3#inserting-data-from-s3
+ """
+
+ @abc.abstractmethod
+ def insert_table_to_s3(self, table: Table, s3_settings: S3Settings):
+ """
+ INSERT INTO FUNCTION s3(...) SELECT * FROM {db}.{table}
+
+ see: https://clickhouse.com/docs/en/integrations/s3#exporting-data
+ """
+
+ @abc.abstractmethod
+ def rename_table(self, table: Table, new_name: str, db: str = '') -> None:
+ """
+ https://clickhouse.com/docs/en/sql-reference/statements/rename
+ """
diff --git a/ripley/_sql_cmd/__init__.py b/ripley/_sql_cmd/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/ripley/_sql_cmd/clickhouse.py b/ripley/_sql_cmd/clickhouse.py
new file mode 100644
index 0000000..033f53c
--- /dev/null
+++ b/ripley/_sql_cmd/clickhouse.py
@@ -0,0 +1,182 @@
+from .._sql_cmd.general import (
+ BaseAlter,
+ BaseTruncate,
+ BaseCreateDb,
+ BaseRenameTable,
+ AbstractSql,
+ BaseCreateTable,
+)
+from ..clickhouse_models.s3_settings import ClickhouseS3SettingsModel
+
+
+class CreateDbOnCluster(BaseCreateDb):
+ def __init__(self, name: str, on_cluster: str = '', engine: str = ''):
+ super().__init__(name)
+ self._on_cluster = on_cluster
+ self._engine = engine
+
+ def to_sql(self) -> str:
+ cmd = super().to_sql()
+ on_cluster = f"ON CLUSTER '{self._on_cluster}'" if self._on_cluster else ''
+ engine = f"ENGINE {self._engine}" if self._engine else ''
+ return f"{cmd}{on_cluster}{engine}"
+
+
+class RenameTableOnCluster(BaseRenameTable):
+ def __init__(self, table: str, new_name: str, on_cluster: str = ''):
+ super().__init__(table, new_name)
+ self._on_cluster = on_cluster
+
+ def to_sql(self) -> str:
+ cmd = super().to_sql()
+ return f"{cmd} ON CLUSTER '{self._on_cluster}'" if self._on_cluster else cmd
+
+
+class AlterOnClusterCmd(BaseAlter):
+ def __init__(self, table_name: str, on_cluster: str = ''):
+ super().__init__(table_name)
+ self._on_cluster = on_cluster
+
+ def to_sql(self) -> str:
+ cmd = super().to_sql()
+ return f"{cmd} ON CLUSTER '{self._on_cluster}'" if self._on_cluster else cmd
+
+
+class DetachPartitionOnClusterCmd(AlterOnClusterCmd):
+ def __init__(self, table_name, partition: str, on_cluster: str = ''):
+ super().__init__(table_name, on_cluster)
+ self._partition = partition
+
+ def to_sql(self) -> str:
+ cmd = super().to_sql()
+ return f"{cmd} DETACH PARTITION '{self._partition}'"
+
+
+class AttachPartitionOnClusterCmd(AlterOnClusterCmd):
+ def __init__(self, table_name: str, partition: str, on_cluster: str = ''):
+ super().__init__(table_name, on_cluster)
+ self._partition = partition
+
+ def to_sql(self) -> str:
+ cmd = super().to_sql()
+ return f"{cmd} ATTACH PARTITION '{self._partition}'"
+
+
+class DropPartitionOnClusterCmd(AlterOnClusterCmd):
+ def __init__(self, table_name: str, partition: str, on_cluster: str = ''):
+ super().__init__(table_name, on_cluster)
+ self._partition = partition
+
+ def to_sql(self) -> str:
+ cmd = super().to_sql()
+ return f"{cmd} DROP PARTITION '{self._partition}'"
+
+
+class MovePartitionOnClusterCmd(AlterOnClusterCmd):
+ def __init__(self, table_name: str, partition: str, to_table_name, on_cluster: str = ''):
+ super().__init__(table_name, on_cluster)
+ self._to_table_name = to_table_name
+ self._partition = partition
+
+ def to_sql(self) -> str:
+ cmd = super().to_sql()
+ return f"{cmd} MOVE PARTITION '{self._partition}' TO TABLE {self._to_table_name}"
+
+
+class ReplacePartitionOnClusterCmd(AlterOnClusterCmd):
+ def __init__(self, table_name: str, partition: str, from_table_name, on_cluster: str = ''):
+ super().__init__(table_name, on_cluster)
+ self._from_table_name = from_table_name
+ self._partition = partition
+
+ def to_sql(self) -> str:
+ cmd = super().to_sql()
+ return f"{cmd} REPLACE PARTITION '{self._partition}' FROM {self._from_table_name}"
+
+
+class TruncateOnClusterCmd(BaseTruncate):
+ def __init__(self, table_name: str, on_cluster: str = ''):
+ super().__init__(table_name)
+ self._on_cluster = on_cluster
+
+ def to_sql(self) -> str:
+ cmd = super().to_sql()
+ cmd = f"{cmd} ON CLUSTER '{self._on_cluster}'" if self._on_cluster else cmd
+ return cmd
+
+
+class InsertIntoS3Cmd(AbstractSql):
+ def __init__(self, table_name: str, s3_settings: ClickhouseS3SettingsModel):
+ self._s3_settings = s3_settings
+ self._table_name = table_name
+
+ def __repr__(self):
+ return self._get_s3_cmd()
+
+ def _get_s3_cmd(self, key_id: str = '*', secret: str = '*') -> str:
+ url = self._s3_settings.url
+ file_format = self._s3_settings.file_format
+ compression = self._s3_settings.compression_method
+ s3_cmd = f"""s3('{url}', '{key_id}', '{secret}', '{file_format}', '{compression}')"""
+
+ return f'INSERT INTO FUNCTION {s3_cmd} SELECT * FROM {self._table_name}'
+
+ def to_sql(self) -> str:
+ return self._get_s3_cmd(self._s3_settings.access_key_id, self._s3_settings.secret_access_key)
+
+
+class InsertFromS3Cmd(AbstractSql):
+ def __init__(self, table_name: str, s3_settings: ClickhouseS3SettingsModel):
+ self._s3_settings = s3_settings
+ self._table_name = table_name
+
+ def __repr__(self):
+ return self._get_s3_cmd()
+
+ def _get_s3_cmd(self, key_id: str = '*', secret: str = '*') -> str:
+ url = self._s3_settings.url
+ file_format = self._s3_settings.file_format
+ compression = self._s3_settings.compression_method
+ s3_cmd = f"s3('{url}', '{key_id}', '{secret}', '{file_format}', '{compression}')"
+
+ return f'INSERT INTO {self._table_name} SELECT * FROM {s3_cmd}'
+
+ def to_sql(self) -> str:
+ return self._get_s3_cmd(self._s3_settings.access_key_id, self._s3_settings.secret_access_key)
+
+
+class CreateTableOnClusterCmd(BaseCreateTable):
+ def __init__(self, table_name: str, on_cluster: str = ''):
+ super().__init__(table_name)
+ self._on_cluster = on_cluster
+
+ def to_sql(self) -> str:
+ cmd = super().to_sql()
+ cmd = f"{cmd} ON CLUSTER '{self._on_cluster}'" if self._on_cluster else cmd
+ return cmd
+
+
+class CreateTableAsOnClusterCmd(CreateTableOnClusterCmd):
+ def __init__(
+ self,
+ table_name: str,
+ from_table: str,
+ on_cluster: str = '',
+ order_by: str = '',
+ partition_by: str = '',
+ engine: str = '',
+ ):
+ super().__init__(table_name, on_cluster)
+ self._from_table = from_table
+ self._order_by = order_by
+ self._partition_by = partition_by
+ self._engine = engine
+
+ def to_sql(self) -> str:
+ return f"""
+ {super().to_sql()}
+ engine = {self._engine}
+ {self._order_by}
+ {self._partition_by}
+ AS {self._from_table}
+ """
diff --git a/ripley/_sql_cmd/general.py b/ripley/_sql_cmd/general.py
new file mode 100644
index 0000000..375d3ab
--- /dev/null
+++ b/ripley/_sql_cmd/general.py
@@ -0,0 +1,58 @@
+class AbstractSql:
+    """Base class for SQL command builders; subclasses implement to_sql()."""
+
+    def __repr__(self):
+        # NOTE(review): this removes *every* space from the SQL (so
+        # repr of "TRUNCATE TABLE t" becomes "TRUNCATETABLEt") and collapses
+        # blank lines.  Presumably intended to strip the indentation of
+        # triple-quoted SQL templates — confirm whether a single-space
+        # replace is really what was meant here.
+        return self.to_sql().replace(' ', '').replace('\n\n', '\n')
+
+    def to_sql(self) -> str:
+        # Subclasses must return the executable SQL statement.
+        raise NotImplementedError()
+
+
+class BaseTable(AbstractSql):
+ def __init__(self, table_name: str):
+ self._table_name = table_name
+
+ def to_sql(self) -> str:
+ return f'TABLE {self._table_name}'
+
+
+class BaseTruncate(BaseTable):
+ def to_sql(self) -> str:
+ table = super().to_sql()
+ return f'TRUNCATE {table}'
+
+
+class BaseAlter(BaseTable):
+ def to_sql(self) -> str:
+ table = super().to_sql()
+ return f'ALTER {table}'
+
+
+class BaseCreateTable(BaseTable):
+ def to_sql(self) -> str:
+ table = super().to_sql()
+ return f'CREATE {table}'
+
+
+class BaseCreateDb(AbstractSql):
+ def __init__(self, name: str):
+ self._name = name
+
+ def to_sql(self) -> str:
+ return f'CREATE DATABASE IF NOT EXISTS {self._name}'
+
+
+class BaseRenameTable(AbstractSql):
+ def __init__(self, table: str, new_name: str):
+ self._table = table
+ self._new_name = new_name
+
+ def to_sql(self) -> str:
+ return f'RENAME TABLE {self._table} TO {self._new_name}'
+
+
+class BaseInsertIntoTableFromTable(AbstractSql):
+ def __init__(self, from_table: str, to_table: str):
+ self._from_table = from_table
+ self._to_table = to_table
+
+ def to_sql(self) -> str:
+ return f'INSERT INTO {self._to_table} SELECT * FROM {self._from_table}'
diff --git a/ripley/clickhouse_models/__init__.py b/ripley/clickhouse_models/__init__.py
new file mode 100755
index 0000000..e69de29
diff --git a/ripley/clickhouse_models/column.py b/ripley/clickhouse_models/column.py
new file mode 100644
index 0000000..150a9bd
--- /dev/null
+++ b/ripley/clickhouse_models/column.py
@@ -0,0 +1,32 @@
+from dataclasses import dataclass
+from typing import Union
+
+from .._base_model import BaseModel
+
+
+@dataclass
+class ClickhouseColumnModel(BaseModel):
+    """One row of system.columns (see get_table_columns).
+
+    https://clickhouse.com/docs/en/operations/system-tables/columns
+    """
+
+    # where the column lives and how it is defined
+    database: str
+    table: str
+    name: str
+    type: str
+    default_kind: str
+    default_expression: str
+    comment: str
+    compression_codec: str
+
+    # position and storage counters; key-membership columns are 0/1 flags
+    position: int
+    data_compressed_bytes: int
+    data_uncompressed_bytes: int
+    marks_bytes: int
+    is_in_partition_key: int
+    is_in_sorting_key: int
+    is_in_primary_key: int
+
+    # precision details; None when not applicable to the column's type
+    numeric_precision_radix: Union[int, None]
+    numeric_scale: Union[int, None]
+    datetime_precision: Union[int, None]
+    is_in_sampling_key: int
+    character_octet_length: Union[int, None]
+    numeric_precision: Union[int, None]
+
diff --git a/ripley/clickhouse_models/db.py b/ripley/clickhouse_models/db.py
new file mode 100755
index 0000000..7570f81
--- /dev/null
+++ b/ripley/clickhouse_models/db.py
@@ -0,0 +1,16 @@
+from dataclasses import dataclass
+from uuid import UUID
+
+from .._base_model import BaseModel
+
+
+@dataclass
+class ClickhouseDbModel(BaseModel):
+    """One row of system.databases (see get_databases)."""
+
+    name: str
+    engine: str
+    data_path: str
+    metadata_path: str
+    uuid: UUID
+    engine_full: str
+    comment: str
+
diff --git a/ripley/clickhouse_models/disk.py b/ripley/clickhouse_models/disk.py
new file mode 100755
index 0000000..65b6354
--- /dev/null
+++ b/ripley/clickhouse_models/disk.py
@@ -0,0 +1,21 @@
+from dataclasses import dataclass
+
+from .._base_model import BaseModel
+
+
+@dataclass
+class ClickhouseDiskModel(BaseModel):
+    """One row of system.disks (see get_disks).
+
+    see: https://clickhouse.com/docs/en/operations/system-tables/disks
+    """
+
+    name: str
+    path: str
+    cache_path: str
+    type: str
+
+    # capacity counters (bytes)
+    free_space: int
+    total_space: int
+    unreserved_space: int
+    keep_free_space: int
+    # 0/1 flags
+    is_encrypted: int
+    is_read_only: int
+    is_write_once: int
+    is_remote: int
+    is_broken: int
diff --git a/ripley/clickhouse_models/partition.py b/ripley/clickhouse_models/partition.py
new file mode 100755
index 0000000..739cba3
--- /dev/null
+++ b/ripley/clickhouse_models/partition.py
@@ -0,0 +1,18 @@
+from dataclasses import dataclass
+
+from .._base_model import BaseModel
+
+
+@dataclass
+class ClickhousePartitionModel(BaseModel):
+    """Per-partition stats of one table (see get_table_partitions; sourced from system.parts)."""
+
+    database: str
+    table: str
+    partition: str
+    partition_id: str
+
+    # active/visible are 0/1 flags; the rest are partition totals
+    active: int
+    visible: int
+    rows: int
+    bytes_on_disk: int
+    data_compressed_bytes: int
+    data_uncompressed_bytes: int
diff --git a/ripley/clickhouse_models/process.py b/ripley/clickhouse_models/process.py
new file mode 100755
index 0000000..66aa4d8
--- /dev/null
+++ b/ripley/clickhouse_models/process.py
@@ -0,0 +1,61 @@
+from copy import deepcopy
+from dataclasses import dataclass
+from ipaddress import IPv6Address
+
+from .._base_model import BaseModel
+
+
+@dataclass
+class ClickhouseProcessModel(BaseModel):
+ current_database: str
+ user: str
+ query_id: str
+ initial_query_id: str
+ initial_user: str
+ os_user: str
+ client_hostname: str
+ client_name: str
+ http_user_agent: str
+ http_referer: str
+ forwarded_for: str
+ quota_key: str
+ query: str
+ query_kind: str
+
+ is_initial_query: int
+ port: int
+ initial_port: int
+ interface: int
+ client_revision: int
+ client_version_major: int
+ client_version_minor: int
+ client_version_patch: int
+ http_method: int
+ distributed_depth: int
+ is_cancelled: int
+ is_all_data_sent: int
+ read_bytes: int
+ read_rows: int
+ written_rows: int
+ written_bytes: int
+ total_rows_approx: int
+ memory_usage: int
+ peak_memory_usage: int
+
+ elapsed: float
+ thread_ids: list
+
+ address: IPv6Address
+ initial_address: IPv6Address
+
+ def __init__(self, *args, **kwargs):
+ kwargs_copy = deepcopy(kwargs)
+ for key, alias in (
+ ('ProfileEvents', 'profile_events'),
+ ('Settings', 'settings'),
+ ):
+ kwargs[alias] = kwargs[key]
+ del kwargs[alias]
+
+ self.args = args
+ self.kwargs = kwargs_copy
diff --git a/ripley/clickhouse_models/s3_settings.py b/ripley/clickhouse_models/s3_settings.py
new file mode 100644
index 0000000..7ad7d64
--- /dev/null
+++ b/ripley/clickhouse_models/s3_settings.py
@@ -0,0 +1,15 @@
+import os
+from dataclasses import dataclass, field
+
+from .._base_model import BaseModel
+
+
+@dataclass
+class ClickhouseS3SettingsModel(BaseModel):
+    """Settings passed to the ClickHouse s3() table function."""
+
+    # target object URL for s3(...)
+    url: str
+
+    compression_method: str = 'auto'
+    file_format: str = 'CSV'
+
+    # NOTE(review): os.environ.get returns None when the variable is unset,
+    # which contradicts the `str` annotation — confirm the env vars are
+    # always present, or default to '' instead.
+    access_key_id: str = field(default_factory=lambda: os.environ.get('AWS_ACCESS_KEY_ID'))
+    secret_access_key: str = field(default_factory=lambda: os.environ.get('AWS_SECRET_ACCESS_KEY'))
diff --git a/ripley/clickhouse_models/table.py b/ripley/clickhouse_models/table.py
new file mode 100755
index 0000000..bbc7d3b
--- /dev/null
+++ b/ripley/clickhouse_models/table.py
@@ -0,0 +1,48 @@
+from dataclasses import dataclass
+from datetime import datetime
+from typing import List
+from uuid import UUID
+
+from .._base_model import BaseModel
+
+
+@dataclass
+class ClickhouseTableModel(BaseModel):
+    """One row of system.tables (see get_tables_by_db / get_table_by_name)."""
+
+    # definition & layout
+    partition_key: str
+    database: str
+    as_select: str
+    comment: str
+    create_table_query: str
+    name: str
+    engine: str
+    engine_full: str
+    metadata_path: str
+    primary_key: str
+    sampling_key: str
+    storage_policy: str
+    sorting_key: str
+
+    # size / lifetime counters; is_temporary and has_own_data are 0/1 flags
+    parts: int
+    is_temporary: int
+    total_bytes: int
+    total_rows: int
+    has_own_data: int
+    total_marks: int
+    lifetime_rows: int
+    lifetime_bytes: int
+    active_parts: int
+
+    # dependency graph of this table
+    data_paths: List[str]
+    dependencies_database: List[str]
+    dependencies_table: List[str]
+    loading_dependencies_database: List[str]
+    loading_dependencies_table: List[str]
+    loading_dependent_database: List[str]
+    loading_dependent_table: List[str]
+
+    uuid: UUID
+    metadata_modification_time: datetime
+
+    @property
+    def full_name(self) -> str:
+        # fully qualified "<database>.<name>" used when building SQL
+        return f'{self.database}.{self.name}'
diff --git a/ruff.toml b/ruff.toml
new file mode 100755
index 0000000..fc906d8
--- /dev/null
+++ b/ruff.toml
@@ -0,0 +1,82 @@
+include = ["*.py", "ripley/*.py"]
+exclude = [
+ "templates/*",
+ "tests/*",
+ ".bzr",
+ ".direnv",
+ ".eggs",
+ ".git",
+ ".git-rewrite",
+ ".hg",
+ ".ipynb_checkpoints",
+ ".mypy_cache",
+ ".nox",
+ ".pants.d",
+ ".pyenv",
+ ".pytest_cache",
+ ".pytype",
+ ".ruff_cache",
+ ".svn",
+ ".tox",
+ ".venv",
+ ".vscode",
+ "__pypackages__",
+ "_build",
+ "buck-out",
+ "build",
+ "dist",
+ "node_modules",
+ "site-packages",
+ "venv",
+ "env",
+]
+
+# Same as Black.
+line-length = 120
+indent-width = 4
+
+# Assume Python 3.8
+target-version = "py38"
+
+[lint]
+# Enable Pyflakes (`F`) and a subset of the pycodestyle (`E`) codes by default.
+# Unlike Flake8, Ruff doesn't enable pycodestyle warnings (`W`) or
+# McCabe complexity (`C901`) by default.
+select = ["F", "W", "E", "C"]
+ignore = ["C414", "C408"]
+
+# Allow fix for all enabled rules (when `--fix`) is provided.
+fixable = ["ALL"]
+unfixable = []
+
+# Allow unused variables when underscore-prefixed.
+dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$"
+
+[format]
+# Like Black, use double quotes for strings.
+quote-style = "double"
+
+# Like Black, indent with spaces, rather than tabs.
+indent-style = "space"
+
+# Like Black, respect magic trailing commas.
+skip-magic-trailing-comma = false
+
+# Like Black, automatically detect the appropriate line ending.
+line-ending = "auto"
+
+# Enable auto-formatting of code examples in docstrings. Markdown,
+# reStructuredText code/literal blocks and doctests are all supported.
+#
+# This is currently disabled by default, but it is planned for this
+# to be opt-out in the future.
+docstring-code-format = false
+
+# Set the line length limit used when formatting code snippets in
+# docstrings.
+#
+# This only has an effect when the `docstring-code-format` setting is
+# enabled.
+docstring-code-line-length = "dynamic"
+
+[lint.extend-per-file-ignores]
\ No newline at end of file
diff --git a/tests/__init__.py b/tests/__init__.py
new file mode 100755
index 0000000..e69de29
diff --git a/tests/clickhouse/__init__.py b/tests/clickhouse/__init__.py
new file mode 100755
index 0000000..e69de29
diff --git a/tests/clickhouse/_base_test.py b/tests/clickhouse/_base_test.py
new file mode 100755
index 0000000..d8a54f9
--- /dev/null
+++ b/tests/clickhouse/_base_test.py
@@ -0,0 +1,59 @@
+import unittest
+from enum import Enum
+
+from clickhouse_driver import Client
+
+from ripley import from_clickhouse
+
+
+class DB(Enum):
+ RIPLEY_TESTS = f'ripley_tests_db1'
+ RIPLEY_TESTS2 = f'ripley_tests_db2'
+
+
+_client = Client(host='localhost', port=9000, user='default', password='', database='default')
+_clickhouse = from_clickhouse(_client)
+# create default db for tests
+_clickhouse.create_db(DB.RIPLEY_TESTS.value)
+_clickhouse.create_db(DB.RIPLEY_TESTS2.value)
+# reconnect with the first test database as the active one
+_client = Client(host='localhost', port=9000, user='default', password='', database=DB.RIPLEY_TESTS.value)
+
+
+class BaseClickhouseTest(unittest.TestCase):
+    """Base class: provides a ripley client and freshly recreated test databases."""
+
+    maxDiff = 50000  # model comparisons produce large diffs
+    clickhouse = from_clickhouse(_client)
+
+    def setUp(self):
+        # start every test from empty databases
+        for db in DB:
+            self.clickhouse.exec(f'DROP DATABASE IF EXISTS {db.value}')
+            self.clickhouse.exec(f'CREATE DATABASE {db.value}')
+
+    def get_full_table_name(self, table: str, db: str) -> str:
+        # "<db>.<table>" when a database is given, bare table name otherwise
+        return f'{db}.{table}' if db else table
+
+    def create_test_table(self, test_name: str, db_name: str = ''):
+        """
+        test table with 2 partitions. 1000 records
+        """
+        if db_name:
+            schema_tbl = f'{db_name}.{test_name}'
+        else:
+            schema_tbl = test_name
+
+        # keys 0..999; day partitions '2024-01-01' (key % 3 != 0) and '2025-01-01'
+        self.clickhouse.exec(f"""
+            CREATE TABLE {schema_tbl}
+            (
+                key UInt64,
+                value String,
+                day Date
+            )
+            ENGINE MergeTree()
+            PARTITION BY day
+            ORDER BY key AS (
+                SELECT rowNumberInAllBlocks() AS key,
+                    concat(toString(key), '+', toString(key)) AS value,
+                    if(modulo(key, 3), '2024-01-01', '2025-01-01') AS day
+                FROM generateRandom('a Array(Int8), d Decimal32(4), c Tuple(DateTime64(3), UUID)', 1, 1, 1)
+                LIMIT 1000
+            )
+        """)
diff --git a/tests/clickhouse/test_clickhouse_partition_service.py b/tests/clickhouse/test_clickhouse_partition_service.py
new file mode 100644
index 0000000..ddf6bd5
--- /dev/null
+++ b/tests/clickhouse/test_clickhouse_partition_service.py
@@ -0,0 +1,142 @@
+from datetime import datetime
+
+from parameterized import parameterized
+
+from tests.clickhouse._base_test import BaseClickhouseTest, DB
+
+
+class TestClickhousePartitionService(BaseClickhouseTest):
+    """Integration tests for move/drop/replace/detach/attach partition operations."""
+
+    @parameterized.expand([
+        [DB.RIPLEY_TESTS.value, DB.RIPLEY_TESTS2.value],
+        [DB.RIPLEY_TESTS2.value, DB.RIPLEY_TESTS.value],
+    ])
+    def test_move_partition(self, from_db: str, to_db: str):
+        from_table_name = 'move_partition'
+        to_table_name = f'to_{from_table_name}'
+
+        self.create_test_table(from_table_name, from_db)
+        from_table = self.clickhouse.get_table_by_name(from_table_name, from_db)
+        to_table = self.clickhouse.create_table_as(
+            from_table=from_table,
+            table=to_table_name,
+            db=to_db,
+        )
+
+        # aggregated stats of one partition from system.parts (active & visible only)
+        def _get_partition(_table: str, _db: str, _partition: str):
+            _db = _db or self.clickhouse.active_db
+            return self.clickhouse.exec(f"""
+                SELECT partition,
+                    active,
+                    visible,
+                    sum(rows) AS rows,
+                    sum(data_uncompressed_bytes) AS data_uncompressed_bytes
+                FROM system.parts
+                WHERE database = %(_db)s AND table = %(_table)s AND partition = %(_partition)s
+                    AND active = 1
+                    AND visible = 1
+                GROUP BY partition, active, visible
+            """, params={'_db': _db, '_table': _table, '_partition': _partition})
+
+        partition = '2024-01-01'
+        origin_partition = _get_partition(from_table_name, from_db, partition)
+        self.clickhouse.move_partition(
+            from_table=from_table,
+            to_table=to_table,
+            partition=partition,
+        )
+
+        # partition must be identical on the target and gone from the source
+        target_partition = _get_partition(to_table_name, to_db, partition)
+        self.assertEqual(origin_partition, target_partition)
+        self.assertListEqual([], _get_partition(from_table_name, from_db, partition))
+
+    @parameterized.expand([
+        [DB.RIPLEY_TESTS.value, '2024-01-01', [('2025-01-01',)]],
+        [DB.RIPLEY_TESTS2.value, '2025-01-01', [('2024-01-01',)]],
+    ])
+    def test_drop_partition(self, db_name: str, partition: str, expected: list):
+        table_name = 'drop_partition'
+
+        self.create_test_table(table_name, db_name)
+        table = self.clickhouse.get_table_by_name(table_name, db_name)
+        self.clickhouse.drop_partition(table, partition)
+
+        # only the partition that was NOT dropped should still hold rows
+        result = self.clickhouse.exec("""
+            SELECT DISTINCT partition
+            FROM system.parts
+            WHERE database = %(db_name)s AND table = %(table)s
+                AND rows > 0
+        """, params={'db_name': db_name, 'table': table_name})
+
+        self.assertEqual(result, expected)
+
+    @parameterized.expand([
+        [
+            DB.RIPLEY_TESTS.value,
+            DB.RIPLEY_TESTS2.value,
+            '2024-01-01',
+            335,
+            [(0, 'replace_partition_test', datetime(2024, 1, 1).date())],
+        ],
+        [
+            DB.RIPLEY_TESTS2.value,
+            DB.RIPLEY_TESTS.value,
+            '2025-01-01',
+            667,
+            [(0, 'replace_partition_test', datetime(2025, 1, 1).date())],
+        ],
+    ])
+    def test_replace_partition(
+        self,
+        source_db: str,
+        target_db: str,
+        partition: str,
+        expected_count: int,
+        expected: list,
+    ):
+        new_data_table = 'replace_partition'
+        old_data_table = 'replace_partition_from'
+
+        self.create_test_table(old_data_table, source_db)
+        old_table = self.clickhouse.get_table_by_name(old_data_table, source_db)
+
+        # empty clone of the source table that will donate the replacement partition
+        self.clickhouse.create_table_as(from_table=old_table, table=new_data_table, db=target_db)
+        new_table = self.clickhouse.get_table_by_name(new_data_table, target_db)
+
+        self.clickhouse.exec(f"""
+            INSERT INTO {new_table.full_name}
+            SELECT 0, 'replace_partition_test', '{partition}'
+        """)
+
+        self.clickhouse.replace_partition(from_table=new_table, to_table=old_table, partition=partition)
+        result = self.clickhouse.exec(f'SELECT count(*) AS records FROM {old_table.full_name}')
+        self.assertEqual(result, [(expected_count,)])
+
+        # REPLACE copies the partition: both tables now hold the single new row
+        for table_name in [new_table.full_name, old_table.full_name]:
+            result = self.clickhouse.exec(f"SELECT * FROM {table_name} WHERE day = '{partition}'")
+            self.assertEqual(result, expected)
+
+    @parameterized.expand([
+        [DB.RIPLEY_TESTS.value, '2025-01-01'],
+        [DB.RIPLEY_TESTS2.value, '2024-01-01'],
+    ])
+    def test_detach_attach_partition(self, db_name: str, partition: str):
+        # active flags of the partition's parts; empty result when detached
+        def select_is_active(_table: str, _db: str, _partition: str):
+            return self.clickhouse.exec("""
+                SELECT active
+                FROM system.parts
+                WHERE table = %(_table)s
+                    AND database = %(_db)s AND partition = %(_partition)s
+                    AND visible = 1
+            """, params={'_table': _table, '_db': _db, '_partition': _partition})
+
+        table_name = 'detach_attach_partition'
+        self.create_test_table(table_name, db_name)
+        table = self.clickhouse.get_table_by_name(table_name, db_name)
+        self.clickhouse.detach_partition(table, partition)
+
+        active = select_is_active(table_name, db_name, partition)
+        self.assertEqual(active, [])
+        self.clickhouse.attach_partition(table, partition)
+
+        active = select_is_active(table_name, db_name, partition)
+        self.assertEqual(active, [(1,)])
diff --git a/tests/clickhouse/test_clickhouse_system_service.py b/tests/clickhouse/test_clickhouse_system_service.py
new file mode 100755
index 0000000..bb7ce6d
--- /dev/null
+++ b/tests/clickhouse/test_clickhouse_system_service.py
@@ -0,0 +1,145 @@
+from parameterized import parameterized
+
+from ripley.clickhouse_models.db import ClickhouseDbModel
+from ripley.clickhouse_models.disk import ClickhouseDiskModel
+from ripley.clickhouse_models.partition import ClickhousePartitionModel
+from ripley.clickhouse_models.table import ClickhouseTableModel
+from tests.clickhouse._base_test import BaseClickhouseTest, DB
+
+
+class TestClickhouseSystemService(BaseClickhouseTest):
+    """Integration tests for the system.* read helpers (disks, parts, databases, tables)."""
+
+    def test_get_disks(self):
+        disks = self.clickhouse.get_disks()
+        # free/total/unreserved space vary per run, so they are taken from the answer
+        self.assertListEqual(
+            disks,
+            [
+                ClickhouseDiskModel(**{
+                    'cache_path': '', 'is_broken': 0, 'is_encrypted': 0, 'is_read_only': 0, 'is_remote': 0,
+                    'is_write_once': 0, 'keep_free_space': 0, 'name': 'default', 'path': '/var/lib/clickhouse/',
+                    'type': 'local', 'total_space': disks[0].total_space, 'free_space': disks[0].free_space,
+                    'unreserved_space': disks[0].unreserved_space,
+                })
+            ])
+
+    @parameterized.expand([
+        [DB.RIPLEY_TESTS.value],
+        [DB.RIPLEY_TESTS2.value],
+    ])
+    def test_get_partition(self, db_name: str):
+        table = 'get_partition'
+        self.create_test_table(table, db_name)
+        partitions = self.clickhouse.get_table_partitions(table, db_name)
+        # compressed sizes vary with codec/version, so they come from the answer
+        self.assertListEqual(
+            [
+                ClickhousePartitionModel(**{
+                    'database': db_name, 'table': table, 'partition': '2024-01-01', 'partition_id': '20240101',
+                    'active': 1, 'rows': 666, 'data_uncompressed_bytes': 11844,
+                    'data_compressed_bytes': partitions[0].data_compressed_bytes,
+                    'bytes_on_disk': partitions[0].bytes_on_disk,
+                    'visible': 1,
+                }),
+                ClickhousePartitionModel(**{
+                    'database': db_name, 'table': table, 'partition': '2025-01-01', 'partition_id': '20250101',
+                    'active': 1, 'rows': 334, 'data_uncompressed_bytes': 5936,
+                    'data_compressed_bytes': partitions[1].data_compressed_bytes,
+                    'bytes_on_disk': partitions[1].bytes_on_disk,
+                    'visible': 1,
+                })
+            ],
+            partitions,
+        )
+
+    def test_get_databases(self):
+        databases = self.clickhouse.get_databases()
+        # uuid / metadata_path placeholders are overwritten with actual values below
+        expected = [
+            ClickhouseDbModel(**{
+                'name': 'default',
+                'uuid': '',
+                'engine': 'Atomic',
+                'data_path': '/var/lib/clickhouse/store/',
+                'metadata_path': '',
+                'engine_full': 'Atomic',
+                'comment': '',
+            }),
+            ClickhouseDbModel(**{
+                'name': DB.RIPLEY_TESTS.value,
+                'uuid': '',
+                'engine': 'Atomic',
+                'data_path': '/var/lib/clickhouse/store/',
+                'metadata_path': '',
+                'engine_full': 'Atomic',
+                'comment': '',
+            }),
+            ClickhouseDbModel(**{
+                'name': DB.RIPLEY_TESTS2.value,
+                'uuid': 'uuid',
+                'engine': 'Atomic',
+                'data_path': '/var/lib/clickhouse/store/',
+                'metadata_path': '',
+                'engine_full': 'Atomic',
+                'comment': '',
+            })
+        ]
+
+        for ix, database in enumerate(databases):
+            expected[ix].uuid = database.uuid
+            expected[ix].metadata_path = database.metadata_path
+
+        self.assertEqual(databases, expected)
+
+    @parameterized.expand([
+        [DB.RIPLEY_TESTS.value],
+        [DB.RIPLEY_TESTS2.value],
+    ])
+    def test_get_tables_by_db(self, db_name: str):
+        table1 = 'get_tables_by_db1'
+        table2 = 'get_tables_by_db2'
+        self.create_test_table(table1, db_name)
+        self.create_test_table(table2, db_name)
+        tables = self.clickhouse.get_tables_by_db(db_name)
+
+        # environment-dependent values (paths, uuid, mtime) come from the answer
+        self.assertListEqual(
+            [
+                ClickhouseTableModel(**{
+                    'active_parts': 2, 'as_select': '', 'database': db_name,
+                    'dependencies_database': [], 'dependencies_table': [], 'comment': '',
+                    'create_table_query': f'CREATE TABLE {db_name}.{table1} '
+                                          f'(`key` UInt64, `value` String, `day` Date) '
+                                          f'ENGINE = MergeTree PARTITION BY day ORDER BY key '
+                                          f'SETTINGS index_granularity = 8192',
+                    'name': tables[0].name, 'engine': 'MergeTree',
+                    'engine_full': 'MergeTree PARTITION BY day ORDER BY key SETTINGS index_granularity = 8192',
+                    'parts': 2,
+                    'partition_key': 'day', 'primary_key': 'key', 'sampling_key': '', 'storage_policy': 'default',
+                    'sorting_key': 'key', 'is_temporary': 0, 'total_bytes': 12255, 'total_rows': 1000,
+                    'total_marks': 4, 'lifetime_rows': None, 'lifetime_bytes': None, 'has_own_data': 1,
+                    'loading_dependencies_database': [], 'loading_dependencies_table': [],
+                    'loading_dependent_database': [], 'loading_dependent_table': [],
+                    'data_paths': tables[0].data_paths,
+                    'metadata_modification_time': tables[0].metadata_modification_time,
+                    'metadata_path': tables[0].metadata_path,
+                    'uuid': tables[0].uuid,
+                }),
+                ClickhouseTableModel(**{
+                    'active_parts': 2, 'as_select': '', 'database': db_name, 'dependencies_database': [],
+                    'dependencies_table': [], 'comment': '',
+                    'create_table_query':
+                        f'CREATE TABLE {db_name}.{table2} '
+                        '(`key` UInt64, `value` String, `day` Date) ENGINE = MergeTree PARTITION BY day '
+                        'ORDER BY key SETTINGS index_granularity = 8192',
+                    'name': tables[1].name, 'engine': 'MergeTree',
+                    'engine_full': 'MergeTree PARTITION BY day ORDER BY key SETTINGS index_granularity = 8192',
+                    'parts': 2,
+                    'partition_key': 'day', 'primary_key': 'key', 'sampling_key': '', 'storage_policy': 'default',
+                    'sorting_key': 'key', 'is_temporary': 0, 'total_bytes': 12255, 'total_rows': 1000,
+                    'total_marks': 4, 'lifetime_rows': None, 'lifetime_bytes': None, 'has_own_data': 1,
+                    'loading_dependencies_database': [], 'loading_dependencies_table': [],
+                    'loading_dependent_database': [], 'loading_dependent_table': [],
+                    'data_paths': tables[1].data_paths,
+                    'metadata_modification_time': tables[1].metadata_modification_time,
+                    'metadata_path': tables[1].metadata_path,
+                    'uuid': tables[1].uuid,
+                })
+            ],
+            tables,
+        )
diff --git a/tests/clickhouse/test_clickhouse_table_service.py b/tests/clickhouse/test_clickhouse_table_service.py
new file mode 100644
index 0000000..b0d4610
--- /dev/null
+++ b/tests/clickhouse/test_clickhouse_table_service.py
@@ -0,0 +1,186 @@
import os
from typing import Optional

import boto3
from parameterized import parameterized

from ripley.clickhouse_models.s3_settings import ClickhouseS3SettingsModel
from tests.clickhouse._base_test import BaseClickhouseTest, DB
+
# Name of the single bucket the tests work in; recreated from scratch in setUp.
_S3_BUCKET = 'ripley'
# Region passed to the boto3 client (the local endpoint presumably ignores it,
# but boto3 requires one — TODO confirm against the compose S3 service).
_REGION_NAME = 'us-east-1'
+
+
def _init_s3():
    """Build a boto3 S3 client pointed at the local test endpoint.

    Returns:
        A botocore S3 client configured for the compose-provided service
        on localhost:9001, with TLS disabled.

    Note:
        The original used ``endpoint_url='https://...'`` together with
        ``use_ssl=False``.  boto3 takes the TLS setting from the endpoint
        scheme when ``endpoint_url`` is given, so ``https`` would silently
        force SSL.  ``http`` matches both ``use_ssl=False`` and the plain
        ``http://localhost:9001`` URL used for ClickhouseS3SettingsModel.
    """
    return boto3.client(
        's3',
        endpoint_url='http://localhost:9001',
        region_name=_REGION_NAME,
        use_ssl=False,
        verify=False,
    )
+
+
class TestClickhouseTableService(BaseClickhouseTest):
    """Integration tests for table-level Clickhouse operations:
    CREATE TABLE ... AS, INSERT ... SELECT, TRUNCATE, and S3 export/import.

    Requires the docker-compose ClickHouse service and the S3-compatible
    service on localhost:9001 (see the CI workflow) to be running.
    """

    # Shared client created at class-definition time; refreshed per test in setUp.
    s3 = _init_s3()

    @classmethod
    def setUpClass(cls):
        # Chain to the base class so any class-level fixtures it defines still run.
        super().setUpClass()
        # Credentials boto3 picks up for the local S3 endpoint.
        os.environ['AWS_ACCESS_KEY_ID'] = 'ripley_key'
        os.environ['AWS_SECRET_ACCESS_KEY'] = 'ripley_secret'

    @classmethod
    def tearDownClass(cls):
        # pop() with a default instead of del: cleanup must never raise KeyError
        # if a variable was already removed.
        os.environ.pop('AWS_ACCESS_KEY_ID', None)
        os.environ.pop('AWS_SECRET_ACCESS_KEY', None)
        super().tearDownClass()

    def setUp(self):
        super().setUp()
        self.s3 = _init_s3()

        # Reset S3 to a known state: delete every bucket (emptying it first),
        # then recreate the one bucket the tests use.
        for bucket in self.s3.list_buckets().get('Buckets', []):
            bucket_name = bucket['Name']
            objects = [
                {'Key': item['Key']}
                for item in self.s3.list_objects_v2(Bucket=bucket_name).get('Contents', [])
            ]
            if objects:
                self.s3.delete_objects(Bucket=bucket_name, Delete={'Objects': objects})
            self.s3.delete_bucket(Bucket=bucket_name)

        self.s3.create_bucket(Bucket=_S3_BUCKET)

    @parameterized.expand([
        [
            '',
            '',
            None,
            None,
            '',
            'CREATE TABLE ripley_tests_db1.create_table_as1 (`key` UInt64, `day` Date) ENGINE = MergeTree '
            'PARTITION BY day ORDER BY key SETTINGS index_granularity = 8192',
            'CREATE TABLE ripley_tests_db1.create_table_as2 (`key` UInt64, `day` Date) ENGINE = MergeTree '
            'PARTITION BY day ORDER BY key SETTINGS index_granularity = 8192'
        ],
        [
            DB.RIPLEY_TESTS2.value,
            DB.RIPLEY_TESTS.value,
            None,
            None,
            '',
            'CREATE TABLE ripley_tests_db1.create_table_as1 (`key` UInt64, `day` Date) ENGINE = MergeTree '
            'PARTITION BY day ORDER BY key SETTINGS index_granularity = 8192',
            'CREATE TABLE ripley_tests_db2.create_table_as2 (`key` UInt64, `day` Date) ENGINE = MergeTree '
            'PARTITION BY day ORDER BY key SETTINGS index_granularity = 8192'
        ],
        [
            '',
            DB.RIPLEY_TESTS2.value,
            ['day'],
            ['key'],
            'AggregatingMergeTree',
            'CREATE TABLE ripley_tests_db2.create_table_as1 (`key` UInt64, `day` Date) ENGINE = MergeTree '
            'PARTITION BY day ORDER BY key SETTINGS index_granularity = 8192',
            'CREATE TABLE ripley_tests_db1.create_table_as2 (`key` UInt64, `day` Date) ENGINE = AggregatingMergeTree '
            'PARTITION BY key ORDER BY day SETTINGS index_granularity = 8192',
        ],
    ])
    def test_create_table_as(
        self,
        db: str,
        from_db: str,
        # NOTE(review): the original annotations were `list or None`, which
        # evaluates to just `list` at runtime; Optional[list] is the intent.
        order_by: Optional[list],
        partition_by: Optional[list],
        engine: str,
        original_ddl: str,
        target_ddl: str,
    ):
        """create_table_as must copy the source DDL — applying any engine /
        ORDER BY / PARTITION BY overrides — and leave the source table intact."""
        from_table_name = 'create_table_as1'
        self.clickhouse.exec(f"""CREATE TABLE {self.get_full_table_name(from_table_name, from_db)}
            (
                key UInt64,
                day Date
            )
            ENGINE MergeTree()
            PARTITION BY day
            ORDER BY key
        """)

        target_table = 'create_table_as2'
        from_table = self.clickhouse.get_table_by_name(from_table_name, from_db)
        new_table = self.clickhouse.create_table_as(
            table=target_table,
            from_table=from_table,
            db=db,
            order_by=order_by,
            engine=engine,
            partition_by=partition_by,
        )

        self.assertEqual(original_ddl, from_table.create_table_query)
        self.assertEqual(target_ddl, new_table.create_table_query)

    @parameterized.expand([
        ['', ''],
        [DB.RIPLEY_TESTS.value, DB.RIPLEY_TESTS2.value],
        [DB.RIPLEY_TESTS2.value, DB.RIPLEY_TESTS.value],
    ])
    def test_insert_from_table(self, from_db: str, to_db: str):
        """insert_from_table must copy all rows, including across databases."""
        from_table_name = 'insert_from_table'
        to_table_name = 'insert_from_table2'

        self.create_test_table(from_table_name, from_db)

        from_table = self.clickhouse.get_table_by_name(from_table_name, from_db)
        to_table = self.clickhouse.create_table_as(
            table=to_table_name,
            from_table=from_table,
            db=to_db,
        )

        self.clickhouse.insert_from_table(from_table=from_table, to_table=to_table)

        # Both tables must report the same row count after the copy.
        result = self.clickhouse.exec(f"""
            SELECT count() AS rows FROM {from_table.full_name}
            UNION ALL
            SELECT count() AS rows FROM {to_table.full_name}
        """)

        self.assertEqual(result, [(1000, ), (1000, )])

    @parameterized.expand([
        [DB.RIPLEY_TESTS.value],
        [DB.RIPLEY_TESTS2.value],
    ])
    def test_truncate(self, db_name: str):
        """truncate must leave the table in place but empty."""
        table_name = 'truncate_table'
        self.create_test_table(table_name, db_name)
        self.clickhouse.truncate(table_name, db_name)

        result = self.clickhouse.exec(f'SELECT * FROM {self.get_full_table_name(table_name, db_name)}')
        self.assertListEqual([], result)

    @parameterized.expand([
        [DB.RIPLEY_TESTS.value],
        [DB.RIPLEY_TESTS2.value],
    ])
    def test_insert_from_s3(self, db_name: str):
        """Round-trip through S3: export a table, import it into an empty
        copy, and verify both tables hold the full row count."""
        def _get_table_results(_table: str) -> list:
            return self.clickhouse.exec(f'SELECT count() as records FROM {_table}')

        to_s3_name = 'insert_to_s3'
        from_s3_name = 'insert_from_s3'
        settings = ClickhouseS3SettingsModel(url='http://localhost:9001/ripley/ripley_s3_test4')

        self.create_test_table(to_s3_name, db_name)

        from_table = self.clickhouse.get_table_by_name(to_s3_name, db_name)
        from_s3_table = self.clickhouse.create_table_as(table=from_s3_name, from_table=from_table, db=db_name)
        to_s3_table = self.clickhouse.get_table_by_name(to_s3_name, db_name)

        self.clickhouse.insert_table_to_s3(table=to_s3_table, s3_settings=settings)
        self.clickhouse.insert_from_s3(table=from_s3_table, s3_settings=settings)

        result1 = _get_table_results(to_s3_table.full_name)
        # BUG FIX: the original queried from_table here, which is the same
        # table as to_s3_table (both fetched by to_s3_name), so the import
        # performed by insert_from_s3 was never actually verified.  Check
        # the table that insert_from_s3 populated instead.
        result2 = _get_table_results(from_s3_table.full_name)

        self.assertEqual(result1, [(1000, )])
        self.assertEqual(result2, [(1000, )])