diff --git a/.github/workflows/check_version.yaml b/.github/workflows/check_version.yaml
index 6405844..b5b709c 100644
--- a/.github/workflows/check_version.yaml
+++ b/.github/workflows/check_version.yaml
@@ -12,9 +12,9 @@ jobs:
       - name: SCM Checkout
         uses: actions/checkout@v3
       - name: Setup Python & Poetry Environment
-        uses: exasol/python-toolbox/.github/actions/python-environment@0.12.0
+        uses: exasol/python-toolbox/.github/actions/python-environment@0.15.0
         with:
           python-version: "3.10"
           poetry-version: '1.8.2'
       - name: Check Release
-        run: ./scripts/build/check_release.sh "python3.10"
+        run: poetry run python3 scripts/build/check_release.py
diff --git a/.github/workflows/checks.yaml b/.github/workflows/checks.yaml
new file mode 100644
index 0000000..7d6f9eb
--- /dev/null
+++ b/.github/workflows/checks.yaml
@@ -0,0 +1,38 @@
+name: Checks
+
+on:
+  pull_request:
+
+jobs:
+
+  lint-job:
+    name: Linting and Type checks (Python-${{ matrix.python-version }})
+    runs-on: ubuntu-latest
+    strategy:
+      fail-fast: false
+      matrix:
+        python-version: ["3.10", "3.11", "3.12"]
+
+    steps:
+      - name: SCM Checkout
+        uses: actions/checkout@v4
+
+      - name: Setup Python & Poetry Environment
+        uses: exasol/python-toolbox/.github/actions/python-environment@0.15.0
+        with:
+          python-version: ${{ matrix.python-version }}
+
+      - name: Run Unit Tests
+        run: poetry run nox -s unit-tests
+
+      - name: Run Lint
+        run: poetry run nox -s lint
+
+      - name: Run type-check
+        run: poetry run nox -s type-check
+
+      - name: Upload Artifacts
+        uses: actions/upload-artifact@v4
+        with:
+          name: ".lint-python-${{ matrix.python-version }}.txt"
+          path: .lint.txt
\ No newline at end of file
diff --git a/.github/workflows/run_ci_test.yaml b/.github/workflows/ci.yaml
similarity index 81%
rename from .github/workflows/run_ci_test.yaml
rename to .github/workflows/ci.yaml
index b9457cf..b3dff2e 100644
--- a/.github/workflows/run_ci_test.yaml
+++ b/.github/workflows/ci.yaml
@@ -1,4 +1,4 @@
-name: Run Unit Tests
+name: Run Integration Tests
 
 on:
   push:
@@ -14,9 +14,9 @@ jobs:
       - name: SCM Checkout
         uses: actions/checkout@v4
       - name: Setup Python & Poetry Environment
-        uses: exasol/python-toolbox/.github/actions/python-environment@0.12.0
+        uses: exasol/python-toolbox/.github/actions/python-environment@0.15.0
         with:
           python-version: "3.10"
           poetry-version: '1.8.2'
       - name: Run pytest
-        run: poetry run pytest
+        run: poetry run nox -s integration-tests
diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml
index 0f1f571..015e2d6 100644
--- a/.github/workflows/release.yaml
+++ b/.github/workflows/release.yaml
@@ -17,7 +17,7 @@ jobs:
         uses: actions/checkout@v3
 
       - name: Setup Python & Poetry Environment
-        uses: exasol/python-toolbox/.github/actions/python-environment@0.12.0
+        uses: exasol/python-toolbox/.github/actions/python-environment@0.15.0
         with:
           python-version: "3.10"
           poetry-version: '1.8.2'
diff --git a/.github/workflows/shellcheck.yaml b/.github/workflows/shellcheck.yaml
deleted file mode 100644
index a29939e..0000000
--- a/.github/workflows/shellcheck.yaml
+++ /dev/null
@@ -1,15 +0,0 @@
-name: Check bash scripts
-
-on:
-  push:
-    branches:
-      - main
-  pull_request:
-
-jobs:
-  shellcheck:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v4
-      - name: Run shellcheck
-        run: ./scripts/build/shellcheck.sh
diff --git a/.gitignore b/.gitignore
index 5d54a4d..e8383e2 100644
--- a/.gitignore
+++ b/.gitignore
@@ -2,4 +2,6 @@
 dist
 .build_output
 .pytest_cache
-__pycache__
\ No newline at end of file
+__pycache__
+.lint.json
+.lint.txt
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
new file mode 100644
index 0000000..017e6b8
--- /dev/null
+++ b/.pre-commit-config.yaml
@@ -0,0 +1,36 @@
+default_stages: [ commit ]
+repos:
+
+  - repo: local
+    hooks:
+      - id: code-format
+        name: code-format
+        types: [ python ]
+        pass_filenames: false
+        language: system
+        entry: poetry run nox -s fix
+
+  - repo: local
+    hooks:
+      - id: type-check
+        name: type-check
+        types: [ python ]
+        pass_filenames: false
+        language: system
+        entry: poetry run nox -s type-check
+
+  - repo: local
+    hooks:
+      - id: lint
+        name: lint
+        types: [ python ]
+        pass_filenames: false
+        language: system
+        entry: poetry run nox -s lint
+
+  - repo: https://github.com/pre-commit/pre-commit-hooks
+    rev: v4.4.0
+    hooks:
+      - id: check-yaml
+      - id: end-of-file-fixer
+      - id: trailing-whitespace
diff --git a/exasol_script_languages_container_ci/__init__.py b/exasol_script_languages_container_ci/__init__.py
index 55dee98..99d78d6 100644
--- a/exasol_script_languages_container_ci/__init__.py
+++ b/exasol_script_languages_container_ci/__init__.py
@@ -1,5 +1,4 @@
-
 from exasol_script_languages_container_ci.cli.commands import (
     run_ci,
-    run_release
+    run_release,
 )
diff --git a/exasol_script_languages_container_ci/cli/commands/run_ci.py b/exasol_script_languages_container_ci/cli/commands/run_ci.py
index 8a2c392..2cc7443 100644
--- a/exasol_script_languages_container_ci/cli/commands/run_ci.py
+++ b/exasol_script_languages_container_ci/cli/commands/run_ci.py
@@ -9,40 +9,48 @@
 
 
 @cli.command()
-@click.option('--flavor', required=True, type=str,
-              help="Flavor name.")
-@click.option('--branch-name', required=True, type=str,
-              help="Branch name.")
-@click.option('--docker-user', required=True, type=str,
-              help="Docker user name")
-@click.option('--docker-password', required=True, type=str,
-              help="Docker password")
-@click.option('--docker-build-repository', required=True, type=str,
-              help="Docker build repository")
-@click.option('--docker-release-repository', required=True, type=str,
-              help="Docker release repository")
-@click.option('--commit-sha', required=True, type=str,
-              help="Commit SHA")
-@click.option('--config-file', required=True, type=click.Path(exists=True, file_okay=True, dir_okay=False),
-              help="The build config file (project specific)")
+@click.option("--flavor", required=True, type=str, help="Flavor name.")
+@click.option("--branch-name", required=True, type=str, help="Branch name.")
+@click.option("--docker-user", required=True, type=str, help="Docker user name")
+@click.option("--docker-password", required=True, type=str, help="Docker password")
+@click.option(
+    "--docker-build-repository", required=True, type=str, help="Docker build repository"
+)
+@click.option(
+    "--docker-release-repository",
+    required=True,
+    type=str,
+    help="Docker release repository",
+)
+@click.option("--commit-sha", required=True, type=str, help="Commit SHA")
+@click.option(
+    "--config-file",
+    required=True,
+    type=click.Path(exists=True, file_okay=True, dir_okay=False),
+    help="The build config file (project specific)",
+)
 @click.pass_context
-def run_ci(ctx: click.Context,
-           flavor: str,
-           branch_name: str,
-           docker_user: str,
-           docker_password: str,
-           docker_build_repository: str,
-           docker_release_repository: str,
-           commit_sha: str,
-           config_file: str):
+def run_ci(
+    ctx: click.Context,
+    flavor: str,
+    branch_name: str,
+    docker_user: str,
+    docker_password: str,
+    docker_build_repository: str,
+    docker_release_repository: str,
+    commit_sha: str,
+    config_file: str,
+):
logging.basicConfig(level=logging.INFO) build_config = Config.parse_file(config_file) - ci(flavor=flavor, - branch_name=branch_name, - docker_user=docker_user, - docker_password=docker_password, - docker_build_repository=docker_build_repository, - docker_release_repository=docker_release_repository, - commit_sha=commit_sha, - build_config=build_config, - git_access=GitAccess()) + ci( + flavor=flavor, + branch_name=branch_name, + docker_user=docker_user, + docker_password=docker_password, + docker_build_repository=docker_build_repository, + docker_release_repository=docker_release_repository, + commit_sha=commit_sha, + build_config=build_config, + git_access=GitAccess(), + ) diff --git a/exasol_script_languages_container_ci/cli/commands/run_release.py b/exasol_script_languages_container_ci/cli/commands/run_release.py index 26af13e..9d9987a 100644 --- a/exasol_script_languages_container_ci/cli/commands/run_release.py +++ b/exasol_script_languages_container_ci/cli/commands/run_release.py @@ -6,50 +6,69 @@ from exasol_script_languages_container_ci.cli.cli import cli from exasol_script_languages_container_ci.lib.asset_uploader import AssetUploader from exasol_script_languages_container_ci.lib.config.config_data_model import Config -from exasol_script_languages_container_ci.lib.github_release_asset_uploader import GithubReleaseAssetUploader +from exasol_script_languages_container_ci.lib.github_release_asset_uploader import ( + GithubReleaseAssetUploader, +) from exasol_script_languages_container_ci.lib.release import release from exasol_script_languages_container_ci.lib.release_uploader import ReleaseUploader @cli.command() -@click.option('--flavor', required=True, type=str, - help="Flavor name.") -@click.option('--docker-user', required=True, type=str, - help="Docker user name") -@click.option('--docker-password', required=True, type=str, - help="Docker password") -@click.option('--docker-release-repository', required=True, type=str, - help="Docker release repository") -@click.option('--config-file', required=True, type=click.Path(exists=True, file_okay=True, dir_okay=False), - help="The build config file (project specific)") -@click.option('--source-repo-url', required=True, type=str, - help="The url of the repository. Usually set by AWS under env variable CODEBUILD_SOURCE_REPO_URL.") -@click.option('--release-id', required=True, type=int, - help="The id of the release.") -@click.option('--dry-run/--no-dry-run', default=False, - help="If true, runs release without pushing the container to the docker release repository." - "If false, also pushes the container to the docker release repository.") +@click.option("--flavor", required=True, type=str, help="Flavor name.") +@click.option("--docker-user", required=True, type=str, help="Docker user name") +@click.option("--docker-password", required=True, type=str, help="Docker password") +@click.option( + "--docker-release-repository", + required=True, + type=str, + help="Docker release repository", +) +@click.option( + "--config-file", + required=True, + type=click.Path(exists=True, file_okay=True, dir_okay=False), + help="The build config file (project specific)", +) +@click.option( + "--source-repo-url", + required=True, + type=str, + help="The url of the repository. 
Usually set by AWS under env variable CODEBUILD_SOURCE_REPO_URL.", +) +@click.option("--release-id", required=True, type=int, help="The id of the release.") +@click.option( + "--dry-run/--no-dry-run", + default=False, + help="If true, runs release without pushing the container to the docker release repository." + "If false, also pushes the container to the docker release repository.", +) @click.pass_context -def run_release(ctx: click.Context, - flavor: str, - docker_user: str, - docker_password: str, - docker_release_repository: str, - config_file: str, - source_repo_url: str, - release_id: int, - dry_run: bool): +def run_release( + ctx: click.Context, + flavor: str, + docker_user: str, + docker_password: str, + docker_release_repository: str, + config_file: str, + source_repo_url: str, + release_id: int, + dry_run: bool, +): logging.basicConfig(level=logging.INFO) - github_release_asset_uploader = GithubReleaseAssetUploader(os.getenv("GITHUB_TOKEN")) + github_release_asset_uploader = GithubReleaseAssetUploader( + os.getenv("GITHUB_TOKEN") + ) asset_uploader = AssetUploader(release_asset_uploader=github_release_asset_uploader) release_uploader = ReleaseUploader(asset_uploader=asset_uploader) build_config = Config.parse_file(config_file) - release(flavor=flavor, - docker_user=docker_user, - docker_password=docker_password, - docker_release_repository=docker_release_repository, - build_config=build_config, - source_repo_url=source_repo_url, - release_id=release_id, - release_uploader=release_uploader, - is_dry_run=dry_run) + release( + flavor=flavor, + docker_user=docker_user, + docker_password=docker_password, + docker_release_repository=docker_release_repository, + build_config=build_config, + source_repo_url=source_repo_url, + release_id=release_id, + release_uploader=release_uploader, + is_dry_run=dry_run, + ) diff --git a/exasol_script_languages_container_ci/lib/asset_uploader.py b/exasol_script_languages_container_ci/lib/asset_uploader.py index 2eb1896..1479bc3 100644 --- a/exasol_script_languages_container_ci/lib/asset_uploader.py +++ b/exasol_script_languages_container_ci/lib/asset_uploader.py @@ -2,7 +2,9 @@ import logging from pathlib import Path -from exasol_script_languages_container_ci.lib.github_release_asset_uploader import GithubReleaseAssetUploader +from exasol_script_languages_container_ci.lib.github_release_asset_uploader import ( + GithubReleaseAssetUploader, +) class AssetUploader: @@ -10,17 +12,29 @@ class AssetUploader: def __init__(self, release_asset_uploader: GithubReleaseAssetUploader): self._release_asset_uploader = release_asset_uploader - def upload_assets(self, - repo_id: str, release_id: int, content_type: str, - artifact_path: str, file_suffix: str, label_prefix: str): - release_artifacts = glob.glob(f'{artifact_path}/*{file_suffix}') + def upload_assets( + self, + repo_id: str, + release_id: int, + content_type: str, + artifact_path: str, + file_suffix: str, + label_prefix: str, + ): + release_artifacts = glob.glob(f"{artifact_path}/*{file_suffix}") for release_artifact in release_artifacts: artifact_file_name = Path(release_artifact).name if artifact_file_name.endswith(file_suffix): - artifact_file_name = artifact_file_name[:-len(file_suffix)] + artifact_file_name = artifact_file_name[: -len(file_suffix)] else: - logging.error(f"Artifact file: {artifact_file_name} does not end with {file_suffix}. 
" - f"Using {artifact_file_name} as label.") - self._release_asset_uploader.upload(archive_path=release_artifact, - label=f"{label_prefix} {artifact_file_name}", - repo_id=repo_id, release_id=release_id, content_type=content_type) + logging.error( + f"Artifact file: {artifact_file_name} does not end with {file_suffix}. " + f"Using {artifact_file_name} as label." + ) + self._release_asset_uploader.upload( + archive_path=release_artifact, + label=f"{label_prefix} {artifact_file_name}", + repo_id=repo_id, + release_id=release_id, + content_type=content_type, + ) diff --git a/exasol_script_languages_container_ci/lib/branch_config.py b/exasol_script_languages_container_ci/lib/branch_config.py index 750f49a..deed8ac 100644 --- a/exasol_script_languages_container_ci/lib/branch_config.py +++ b/exasol_script_languages_container_ci/lib/branch_config.py @@ -1,5 +1,8 @@ -from enum import Enum, auto import re +from enum import ( + Enum, + auto, +) class BuildSteps(Enum): @@ -9,14 +12,26 @@ class BuildSteps(Enum): class BranchConfig(Enum): - DEVELOP = {BuildSteps.BUILD_ALL_ALWAYS: True, BuildSteps.REBUILD: True, - BuildSteps.PUSH_TO_DOCKER_RELEASE_REPO: False} - MAIN = {BuildSteps.BUILD_ALL_ALWAYS: True, BuildSteps.REBUILD: True, - BuildSteps.PUSH_TO_DOCKER_RELEASE_REPO: True} - REBUILD = {BuildSteps.BUILD_ALL_ALWAYS: True, BuildSteps.REBUILD: True, - BuildSteps.PUSH_TO_DOCKER_RELEASE_REPO: False} - OTHER = {BuildSteps.BUILD_ALL_ALWAYS: False, BuildSteps.REBUILD: False, - BuildSteps.PUSH_TO_DOCKER_RELEASE_REPO: False} + DEVELOP = { + BuildSteps.BUILD_ALL_ALWAYS: True, + BuildSteps.REBUILD: True, + BuildSteps.PUSH_TO_DOCKER_RELEASE_REPO: False, + } + MAIN = { + BuildSteps.BUILD_ALL_ALWAYS: True, + BuildSteps.REBUILD: True, + BuildSteps.PUSH_TO_DOCKER_RELEASE_REPO: True, + } + REBUILD = { + BuildSteps.BUILD_ALL_ALWAYS: True, + BuildSteps.REBUILD: True, + BuildSteps.PUSH_TO_DOCKER_RELEASE_REPO: False, + } + OTHER = { + BuildSteps.BUILD_ALL_ALWAYS: False, + BuildSteps.REBUILD: False, + BuildSteps.PUSH_TO_DOCKER_RELEASE_REPO: False, + } @staticmethod def build_always(branch_name: str) -> bool: @@ -28,16 +43,20 @@ def rebuild(branch_name) -> bool: @staticmethod def push_to_docker_release_repo(branch_name: str) -> bool: - return get_branch_config(branch_name).value[BuildSteps.PUSH_TO_DOCKER_RELEASE_REPO] + return get_branch_config(branch_name).value[ + BuildSteps.PUSH_TO_DOCKER_RELEASE_REPO + ] def get_branch_config(branch_name: str) -> BranchConfig: - matches = ((re.compile(r"refs/heads/(master|main)"), BranchConfig.MAIN), - (re.compile(r"refs/heads/develop"), BranchConfig.DEVELOP), - (re.compile(r"refs/heads/rebuild/.*"), BranchConfig.REBUILD)) + matches = ( + (re.compile(r"refs/heads/(master|main)"), BranchConfig.MAIN), + (re.compile(r"refs/heads/develop"), BranchConfig.DEVELOP), + (re.compile(r"refs/heads/rebuild/.*"), BranchConfig.REBUILD), + ) branch_cfg = BranchConfig.OTHER - for (branch_regex, branch_config) in matches: + for branch_regex, branch_config in matches: if branch_regex.match(branch_name): branch_cfg = branch_config break diff --git a/exasol_script_languages_container_ci/lib/ci.py b/exasol_script_languages_container_ci/lib/ci.py index 329d4fb..dda48a7 100644 --- a/exasol_script_languages_container_ci/lib/ci.py +++ b/exasol_script_languages_container_ci/lib/ci.py @@ -1,5 +1,8 @@ import logging -from typing import Set, Callable +from typing import ( + Callable, + Set, +) from exasol_script_languages_container_ci.lib.branch_config import BranchConfig from 
exasol_script_languages_container_ci.lib.ci_build import CIBuild @@ -13,7 +16,7 @@ def get_all_affected_files(git_access: GitAccess, base_branch: str) -> Set[str]: base_last_commit_sha = git_access.get_head_commit_sha_of_branch(base_branch) - changed_files = set() + changed_files = set() # type: ignore for commit in git_access.get_last_commits(): if commit == base_last_commit_sha: break @@ -21,39 +24,52 @@ def get_all_affected_files(git_access: GitAccess, base_branch: str) -> Set[str]: return changed_files -def check_if_need_to_build(branch_name: str, config: Config, flavor: str, git_access: GitAccess): +def check_if_need_to_build( + branch_name: str, config: Config, flavor: str, git_access: GitAccess +): if BranchConfig.build_always(branch_name): return True if "[rebuild]" in git_access.get_last_commit_message(): return True affected_files = list(get_all_affected_files(git_access, config.build.base_branch)) - logging.debug(f"check_if_need_to_build: Found files of last commits: {affected_files}") + logging.debug( + f"check_if_need_to_build: Found files of last commits: {affected_files}" + ) for ignore_path in config.build.ignore.paths: - affected_files = list(filter(lambda file: not file.startswith(ignore_path), affected_files)) + affected_files = list( + filter(lambda file: not file.startswith(ignore_path), affected_files) + ) if len(affected_files) > 0: # Now filter out also other flavor folders this_flavor_path = f"flavors/{flavor}" - affected_files = list(filter(lambda file: not file.startswith("flavors") or file.startswith(this_flavor_path), - affected_files)) + affected_files = list( + filter( + lambda file: not file.startswith("flavors") + or file.startswith(this_flavor_path), + affected_files, + ) + ) logging.debug(f"check_if_need_to_build: filtered files: {affected_files}") return len(affected_files) > 0 -def ci(flavor: str, - branch_name: str, - docker_user: str, - docker_password: str, - docker_build_repository: str, - docker_release_repository: str, - commit_sha: str, - build_config: Config, - git_access: GitAccess, - ci_build: CIBuild = CIBuild(), - ci_execute_tests: CIExecuteTest = CIExecuteTest(), - ci_push: CIPush = CIPush(), - ci_security_scan: CISecurityScan = CISecurityScan(), - ci_prepare: CIPrepare = CIPrepare()): +def ci( + flavor: str, + branch_name: str, + docker_user: str, + docker_password: str, + docker_build_repository: str, + docker_release_repository: str, + commit_sha: str, + build_config: Config, + git_access: GitAccess, + ci_build: CIBuild = CIBuild(), + ci_execute_tests: CIExecuteTest = CIExecuteTest(), + ci_push: CIPush = CIPush(), + ci_security_scan: CISecurityScan = CISecurityScan(), + ci_prepare: CIPrepare = CIPrepare(), +): """ Run CI build: 1. 
Build image @@ -66,36 +82,48 @@ def ci(flavor: str, flavor_path = (f"flavors/{flavor}",) test_container_folder = "test_container" rebuild = BranchConfig.rebuild(branch_name) - needs_to_build = check_if_need_to_build(branch_name, build_config, flavor, git_access) + needs_to_build = check_if_need_to_build( + branch_name, build_config, flavor, git_access + ) if needs_to_build: ci_prepare.prepare() - ci_build.build(flavor_path=flavor_path, - rebuild=rebuild, - build_docker_repository=docker_build_repository, - commit_sha=commit_sha, - docker_user=docker_user, - docker_password=docker_password, - test_container_folder=test_container_folder) - ci_execute_tests.execute_tests(flavor_path=flavor_path, - docker_user=docker_user, - docker_password=docker_password, - test_container_folder=test_container_folder) + ci_build.build( + flavor_path=flavor_path, + rebuild=rebuild, + build_docker_repository=docker_build_repository, + commit_sha=commit_sha, + docker_user=docker_user, + docker_password=docker_password, + test_container_folder=test_container_folder, + ) + ci_execute_tests.execute_tests( + flavor_path=flavor_path, + docker_user=docker_user, + docker_password=docker_password, + test_container_folder=test_container_folder, + ) ci_security_scan.run_security_scan(flavor_path=flavor_path) - ci_push.push(flavor_path=flavor_path, - target_docker_repository=docker_build_repository, - target_docker_tag_prefix=commit_sha, - docker_user=docker_user, - docker_password=docker_password) - ci_push.push(flavor_path=flavor_path, - target_docker_repository=docker_build_repository, - target_docker_tag_prefix="", - docker_user=docker_user, - docker_password=docker_password) + ci_push.push( + flavor_path=flavor_path, + target_docker_repository=docker_build_repository, + target_docker_tag_prefix=commit_sha, + docker_user=docker_user, + docker_password=docker_password, + ) + ci_push.push( + flavor_path=flavor_path, + target_docker_repository=docker_build_repository, + target_docker_tag_prefix="", + docker_user=docker_user, + docker_password=docker_password, + ) if BranchConfig.push_to_docker_release_repo(branch_name): - ci_push.push(flavor_path=flavor_path, - target_docker_repository=docker_release_repository, - target_docker_tag_prefix="", - docker_user=docker_user, - docker_password=docker_password) + ci_push.push( + flavor_path=flavor_path, + target_docker_repository=docker_release_repository, + target_docker_tag_prefix="", + docker_user=docker_user, + docker_password=docker_password, + ) else: logging.warning(f"Skipping build...") diff --git a/exasol_script_languages_container_ci/lib/ci_build.py b/exasol_script_languages_container_ci/lib/ci_build.py index f234d7f..3ccac02 100644 --- a/exasol_script_languages_container_ci/lib/ci_build.py +++ b/exasol_script_languages_container_ci/lib/ci_build.py @@ -1,54 +1,73 @@ import logging -from typing import Tuple, Optional +from typing import ( + Optional, + Tuple, +) -from exasol_integration_test_docker_environment.lib.api.build_test_container import build_test_container from exasol.slc.api import build -from exasol.slc.internal.tasks.test.test_container_content import build_test_container_content +from exasol.slc.internal.tasks.test.test_container_content import ( + build_test_container_content, +) +from exasol_integration_test_docker_environment.lib.api.build_test_container import ( + build_test_container, +) -from exasol_script_languages_container_ci.lib.ci_step_output_printer import CIStepOutputPrinterProtocol, \ - CIStepOutputPrinter +from 
exasol_script_languages_container_ci.lib.ci_step_output_printer import ( + CIStepOutputPrinter, + CIStepOutputPrinterProtocol, +) class CIBuild: - def __init__(self, printer: CIStepOutputPrinterProtocol = CIStepOutputPrinter(logging.info)): + def __init__( + self, printer: CIStepOutputPrinterProtocol = CIStepOutputPrinter(logging.info) + ): self._printer = printer - def build(self, - flavor_path: Tuple[str, ...], - rebuild: bool, - build_docker_repository: Optional[str], - commit_sha: str, - docker_user: str, - docker_password: str, - test_container_folder: str): + def build( + self, + flavor_path: Tuple[str, ...], + rebuild: bool, + build_docker_repository: Optional[str], + commit_sha: str, + docker_user: str, + docker_password: str, + test_container_folder: str, + ): """ Build the script-language container for given flavor. And also build the test container """ logging.info(f"Running command 'build' with parameters: {locals()}") if build_docker_repository is None: - slc_image_infos = build(flavor_path=flavor_path, force_rebuild=rebuild, - source_docker_tag_prefix=commit_sha, - source_docker_username=docker_user, - source_docker_password=docker_password, - shortcut_build=False, - workers=7, - log_level="WARNING", - use_job_specific_log_file=True - ) + slc_image_infos = build( + flavor_path=flavor_path, + force_rebuild=rebuild, + source_docker_tag_prefix=commit_sha, + source_docker_username=docker_user, + source_docker_password=docker_password, + shortcut_build=False, + workers=7, + log_level="WARNING", + use_job_specific_log_file=True, + ) else: - slc_image_infos = build(flavor_path=flavor_path, force_rebuild=rebuild, - source_docker_repository_name=build_docker_repository, - source_docker_tag_prefix=commit_sha, - source_docker_username=docker_user, - source_docker_password=docker_password, - shortcut_build=False, - workers=7, - log_level="WARNING", - use_job_specific_log_file=True - ) - logging.info(f"Running command 'build_test_container' with parameters: {locals()}") + slc_image_infos = build( + flavor_path=flavor_path, + force_rebuild=rebuild, + source_docker_repository_name=build_docker_repository, + source_docker_tag_prefix=commit_sha, + source_docker_username=docker_user, + source_docker_password=docker_password, + shortcut_build=False, + workers=7, + log_level="WARNING", + use_job_specific_log_file=True, + ) + logging.info( + f"Running command 'build_test_container' with parameters: {locals()}" + ) content = build_test_container_content(test_container_folder) test_container_image_infos = build_test_container( force_rebuild=rebuild, @@ -57,6 +76,6 @@ def build(self, source_docker_repository_name=build_docker_repository, source_docker_tag_prefix=commit_sha, log_level="WARNING", - use_job_specific_log_file=True + use_job_specific_log_file=True, ) - self._printer.print_exasol_docker_images() \ No newline at end of file + self._printer.print_exasol_docker_images() diff --git a/exasol_script_languages_container_ci/lib/ci_export.py b/exasol_script_languages_container_ci/lib/ci_export.py index afc2272..c0f292f 100644 --- a/exasol_script_languages_container_ci/lib/ci_export.py +++ b/exasol_script_languages_container_ci/lib/ci_export.py @@ -3,27 +3,30 @@ from exasol.slc.api import export -from exasol_script_languages_container_ci.lib.ci_step_output_printer import CIStepOutputPrinterProtocol, \ - CIStepOutputPrinter +from exasol_script_languages_container_ci.lib.ci_step_output_printer import ( + CIStepOutputPrinter, + CIStepOutputPrinterProtocol, +) class CIExport: - def __init__(self, 
printer: CIStepOutputPrinterProtocol = CIStepOutputPrinter(logging.info)): + def __init__( + self, printer: CIStepOutputPrinterProtocol = CIStepOutputPrinter(logging.info) + ): self._printer = printer - def export(self, - flavor_path: Tuple[str, ...], - export_path: str): + def export(self, flavor_path: Tuple[str, ...], export_path: str): """ Export the flavor as tar.gz file """ logging.info(f"Running command 'push' with parameters: {locals()}") - export_result = export(flavor_path=flavor_path, - export_path=export_path, - workers=7, - log_level="WARNING", - use_job_specific_log_file=True - ) + export_result = export( + flavor_path=flavor_path, + export_path=export_path, + workers=7, + log_level="WARNING", + use_job_specific_log_file=True, + ) self._printer.print_exasol_docker_images() diff --git a/exasol_script_languages_container_ci/lib/ci_prepare.py b/exasol_script_languages_container_ci/lib/ci_prepare.py index d921d67..326ef19 100644 --- a/exasol_script_languages_container_ci/lib/ci_prepare.py +++ b/exasol_script_languages_container_ci/lib/ci_prepare.py @@ -1,7 +1,9 @@ import os from pathlib import Path -from exasol_integration_test_docker_environment.cli.options.system_options import DEFAULT_OUTPUT_DIRECTORY +from exasol_integration_test_docker_environment.cli.options.system_options import ( + DEFAULT_OUTPUT_DIRECTORY, +) from exasol_integration_test_docker_environment.lib.base import luigi_log_config diff --git a/exasol_script_languages_container_ci/lib/ci_push.py b/exasol_script_languages_container_ci/lib/ci_push.py index d6ff1a8..95a30fe 100644 --- a/exasol_script_languages_container_ci/lib/ci_push.py +++ b/exasol_script_languages_container_ci/lib/ci_push.py @@ -3,35 +3,42 @@ from exasol.slc.api.push import push -from exasol_script_languages_container_ci.lib.ci_step_output_printer import CIStepOutputPrinterProtocol, \ - CIStepOutputPrinter +from exasol_script_languages_container_ci.lib.ci_step_output_printer import ( + CIStepOutputPrinter, + CIStepOutputPrinterProtocol, +) class CIPush: - def __init__(self, printer: CIStepOutputPrinterProtocol = CIStepOutputPrinter(logging.info)): + def __init__( + self, printer: CIStepOutputPrinterProtocol = CIStepOutputPrinter(logging.info) + ): self._printer = printer - def push(self, - flavor_path: Tuple[str, ...], - target_docker_repository: str, - target_docker_tag_prefix: str, - docker_user: str, - docker_password: str): + def push( + self, + flavor_path: Tuple[str, ...], + target_docker_repository: str, + target_docker_tag_prefix: str, + docker_user: str, + docker_password: str, + ): """ Push the docker image to Dockerhub """ logging.info(f"Running command 'push' with parameters: {locals()}") - push(flavor_path=flavor_path, - push_all=True, - force_push=True, - workers=7, - target_docker_repository_name=target_docker_repository, - target_docker_tag_prefix=target_docker_tag_prefix, - target_docker_username=docker_user, - target_docker_password=docker_password, - log_level="WARNING", - use_job_specific_log_file=True - ) + push( + flavor_path=flavor_path, + push_all=True, + force_push=True, + workers=7, + target_docker_repository_name=target_docker_repository, + target_docker_tag_prefix=target_docker_tag_prefix, + target_docker_username=docker_user, + target_docker_password=docker_password, + log_level="WARNING", + use_job_specific_log_file=True, + ) self._printer.print_exasol_docker_images() diff --git a/exasol_script_languages_container_ci/lib/ci_security_scan.py b/exasol_script_languages_container_ci/lib/ci_security_scan.py index 
7b58157..cadf232 100644 --- a/exasol_script_languages_container_ci/lib/ci_security_scan.py +++ b/exasol_script_languages_container_ci/lib/ci_security_scan.py @@ -4,27 +4,31 @@ from exasol.slc.api import security_scan -from exasol_script_languages_container_ci.lib.ci_step_output_printer import CIStepOutputPrinterProtocol, \ - CIStepOutputPrinter +from exasol_script_languages_container_ci.lib.ci_step_output_printer import ( + CIStepOutputPrinter, + CIStepOutputPrinterProtocol, +) class CISecurityScan: - def __init__(self, printer: CIStepOutputPrinterProtocol = CIStepOutputPrinter(logging.info)): + def __init__( + self, printer: CIStepOutputPrinterProtocol = CIStepOutputPrinter(logging.info) + ): self._printer = printer - def run_security_scan(self, - flavor_path: Tuple[str, ...]): + def run_security_scan(self, flavor_path: Tuple[str, ...]): """ Run security scan and print result """ logging.info(f"Running command 'security_scan' with parameters {locals()}") - security_scan_result = security_scan(flavor_path=flavor_path, - workers=7, - log_level="WARNING", - use_job_specific_log_file=True - ) + security_scan_result = security_scan( + flavor_path=flavor_path, + workers=7, + log_level="WARNING", + use_job_specific_log_file=True, + ) logging.info("============= SECURITY REPORT ===========") self._printer.print_file(Path(security_scan_result.report_path)) self._printer.print_exasol_docker_images() diff --git a/exasol_script_languages_container_ci/lib/ci_step_output_printer.py b/exasol_script_languages_container_ci/lib/ci_step_output_printer.py index 40392ae..15b413f 100644 --- a/exasol_script_languages_container_ci/lib/ci_step_output_printer.py +++ b/exasol_script_languages_container_ci/lib/ci_step_output_printer.py @@ -1,6 +1,9 @@ from inspect import cleandoc from pathlib import Path -from typing import Callable, Protocol +from typing import ( + Callable, + Protocol, +) import docker @@ -17,7 +20,9 @@ def print_file(self, filename: Path): def _get_exasol_docker_images(): docker_client = docker.from_env() try: - exa_images = [str(img) for img in docker_client.images.list() if "exasol" in str(img)] + exa_images = [ + str(img) for img in docker_client.images.list() if "exasol" in str(img) + ] return exa_images finally: docker_client.close() @@ -35,17 +40,19 @@ def print_exasol_docker_images(self): :return: None """ - self._writer(cleandoc(""" + self._writer( + cleandoc( + """ {seperator} Printing docker images {seperator} - {images}""").format( - seperator=20 * "=", images="\n".join(_get_exasol_docker_images()) - )) + {images}""" + ).format(seperator=20 * "=", images="\n".join(_get_exasol_docker_images())) + ) def print_file(self, filename: Path): """ Print the file's content to the writer. 
""" - with open(filename, "r") as f: + with open(filename) as f: self._writer(f.read()) diff --git a/exasol_script_languages_container_ci/lib/ci_test.py b/exasol_script_languages_container_ci/lib/ci_test.py index 21728b0..c56d26d 100644 --- a/exasol_script_languages_container_ci/lib/ci_test.py +++ b/exasol_script_languages_container_ci/lib/ci_test.py @@ -1,34 +1,43 @@ import logging -from typing import Tuple, Protocol +from typing import ( + Protocol, + Tuple, +) from exasol.slc.api.run_db_tests import run_db_test from exasol.slc.models.test_result import AllTestsResult -from exasol_script_languages_container_ci.lib.ci_step_output_printer import CIStepOutputPrinterProtocol, \ - CIStepOutputPrinter +from exasol_script_languages_container_ci.lib.ci_step_output_printer import ( + CIStepOutputPrinter, + CIStepOutputPrinterProtocol, +) class DBTestRunnerProtocol(Protocol): - def run(self, - flavor_path: Tuple[str, ...], - release_goal: Tuple[str, ...], - test_folder: Tuple[str, ...], - test_container_folder: str, - workers: int, - docker_username: str, - docker_password: str) -> AllTestsResult: + def run( + self, + flavor_path: Tuple[str, ...], + release_goal: Tuple[str, ...], + test_folder: Tuple[str, ...], + test_container_folder: str, + workers: int, + docker_username: str, + docker_password: str, + ) -> AllTestsResult: raise NotImplementedError() class DBTestRunner(DBTestRunnerProtocol): - def run(self, - flavor_path: Tuple[str, ...], - release_goal: Tuple[str, ...], - test_folder: Tuple[str, ...], - test_container_folder: str, - workers: int, - docker_username: str, - docker_password: str) -> AllTestsResult: + def run( + self, + flavor_path: Tuple[str, ...], + release_goal: Tuple[str, ...], + test_folder: Tuple[str, ...], + test_container_folder: str, + workers: int, + docker_username: str, + docker_password: str, + ) -> AllTestsResult: return run_db_test( flavor_path=flavor_path, release_goal=release_goal, @@ -38,67 +47,85 @@ def run(self, source_docker_username=docker_username, source_docker_password=docker_password, log_level="WARNING", - use_job_specific_log_file=True + use_job_specific_log_file=True, ) class CIExecuteTest: - def __init__(self, - db_test_runner: DBTestRunnerProtocol = DBTestRunner(), - printer: CIStepOutputPrinterProtocol = CIStepOutputPrinter(logging.info)): + def __init__( + self, + db_test_runner: DBTestRunnerProtocol = DBTestRunner(), + printer: CIStepOutputPrinterProtocol = CIStepOutputPrinter(logging.info), + ): self._db_test_runner = db_test_runner self._printer = printer - def execute_tests(self, - flavor_path: Tuple[str, ...], - docker_user: str, - docker_password: str, - test_container_folder: str): + def execute_tests( + self, + flavor_path: Tuple[str, ...], + docker_user: str, + docker_password: str, + test_container_folder: str, + ): """ Run db tests """ - db_tests_are_ok = self.run_db_tests(flavor_path=flavor_path, - docker_user=docker_user, - docker_password=docker_password, - test_container_folder=test_container_folder) - linker_namespace_tests_are_ok = self.run_linker_namespace_tests(flavor_path=flavor_path, - docker_user=docker_user, - docker_password=docker_password, - test_container_folder=test_container_folder) + db_tests_are_ok = self.run_db_tests( + flavor_path=flavor_path, + docker_user=docker_user, + docker_password=docker_password, + test_container_folder=test_container_folder, + ) + linker_namespace_tests_are_ok = self.run_linker_namespace_tests( + flavor_path=flavor_path, + docker_user=docker_user, + docker_password=docker_password, + 
test_container_folder=test_container_folder, + ) self._printer.print_exasol_docker_images() tests_are_ok = db_tests_are_ok and linker_namespace_tests_are_ok if not tests_are_ok: raise AssertionError("Not all tests are ok!") - def run_db_tests(self, flavor_path: Tuple[str, ...], - docker_user: str, - docker_password: str, - test_container_folder: str) -> bool: + def run_db_tests( + self, + flavor_path: Tuple[str, ...], + docker_user: str, + docker_password: str, + test_container_folder: str, + ) -> bool: logging.info(f"Running command 'run_db_test' for flavor-path {flavor_path}") - db_test_result = \ - self._db_test_runner.run(flavor_path=flavor_path, - test_folder=tuple(), - release_goal=('release',), - workers=7, - docker_username=docker_user, - docker_password=docker_password, - test_container_folder=test_container_folder) + db_test_result = self._db_test_runner.run( + flavor_path=flavor_path, + test_folder=tuple(), + release_goal=("release",), + workers=7, + docker_username=docker_user, + docker_password=docker_password, + test_container_folder=test_container_folder, + ) self._printer.print_file(db_test_result.command_line_output_path) return db_test_result.tests_are_ok - def run_linker_namespace_tests(self, - flavor_path: Tuple[str, ...], - docker_user: str, - docker_password: str, - test_container_folder: str) -> bool: - logging.info(f"Running command 'run_db_test' for linker_namespace_sanity for flavor-path {flavor_path}") - linker_namespace_test_result = \ - self._db_test_runner.run(flavor_path=flavor_path, workers=7, - test_folder=("test/linker_namespace_sanity",), - release_goal=("base_test_build_run",), - docker_username=docker_user, - docker_password=docker_password, - test_container_folder=test_container_folder) + def run_linker_namespace_tests( + self, + flavor_path: Tuple[str, ...], + docker_user: str, + docker_password: str, + test_container_folder: str, + ) -> bool: + logging.info( + f"Running command 'run_db_test' for linker_namespace_sanity for flavor-path {flavor_path}" + ) + linker_namespace_test_result = self._db_test_runner.run( + flavor_path=flavor_path, + workers=7, + test_folder=("test/linker_namespace_sanity",), + release_goal=("base_test_build_run",), + docker_username=docker_user, + docker_password=docker_password, + test_container_folder=test_container_folder, + ) self._printer.print_file(linker_namespace_test_result.command_line_output_path) return linker_namespace_test_result.tests_are_ok diff --git a/exasol_script_languages_container_ci/lib/common.py b/exasol_script_languages_container_ci/lib/common.py index 4fc93ae..b909599 100644 --- a/exasol_script_languages_container_ci/lib/common.py +++ b/exasol_script_languages_container_ci/lib/common.py @@ -1,8 +1,8 @@ import json from contextlib import contextmanager +from inspect import cleandoc from pathlib import Path from typing import Callable -from inspect import cleandoc import docker @@ -12,5 +12,5 @@ def get_config(config_file: str): """ Opens config file and returns parsed JSON object. 
""" - with open(config_file, "r") as f: + with open(config_file) as f: yield json.load(f) diff --git a/exasol_script_languages_container_ci/lib/config/data_model_generator.py b/exasol_script_languages_container_ci/lib/config/data_model_generator.py index 8916502..eea81ee 100644 --- a/exasol_script_languages_container_ci/lib/config/data_model_generator.py +++ b/exasol_script_languages_container_ci/lib/config/data_model_generator.py @@ -3,7 +3,10 @@ from pathlib import Path from tempfile import TemporaryDirectory -from datamodel_code_generator import generate, InputFileType +from datamodel_code_generator import ( + InputFileType, + generate, +) from exasol_script_languages_container_ci.lib.render_template import render_template @@ -21,12 +24,17 @@ def generate_config_data_model(output_file: Path) -> Path: schema_json = json.dumps(schema_dict) with TemporaryDirectory() as directory: temp_output_file = Path(directory) / CONFIG_DATA_MODEL_FILE_NAME - generate(schema_json, input_file_type=InputFileType.JsonSchema, output=temp_output_file, - class_name="Config", apply_default_values_for_required_fields=True) + generate( + schema_json, + input_file_type=InputFileType.JsonSchema, + output=temp_output_file, + class_name="Config", + apply_default_values_for_required_fields=True, + ) with temp_output_file.open("rt") as temp_output_file_handle: with output_file.open("wt") as output_file_handle: lines = (line for line in temp_output_file_handle) - lines = filter(lambda line: "# timestamp: " not in line, lines) + lines = filter(lambda line: "# timestamp: " not in line, lines) # type: ignore for line in lines: output_file_handle.write(line) return output_file diff --git a/exasol_script_languages_container_ci/lib/git_access.py b/exasol_script_languages_container_ci/lib/git_access.py index b342d08..6b58d62 100644 --- a/exasol_script_languages_container_ci/lib/git_access.py +++ b/exasol_script_languages_container_ci/lib/git_access.py @@ -18,7 +18,7 @@ def get_head_commit_sha_of_branch(self, branch_name) -> str: :raise: ValueError: if the refs with label 'branch_name' does not exists or is not unique. """ repo = Repo() - branch = [b for b in repo.refs if b.name == branch_name] + branch = [b for b in repo.refs if b.name == branch_name] # type: ignore if len(branch) == 0: ex_msg = f"Branch '{branch_name}' does not exist." raise ValueError(ex_msg) @@ -40,5 +40,4 @@ def get_files_of_commit(self, commit_sha) -> Iterable[str]: Returns the files of the specific commits of the repo in the cwd. """ repo = Repo() - return repo.commit(commit_sha).stats.files.keys() - + return repo.commit(commit_sha).stats.files.keys() # type: ignore diff --git a/exasol_script_languages_container_ci/lib/github_release_asset_uploader.py b/exasol_script_languages_container_ci/lib/github_release_asset_uploader.py index 0c7a33f..18b4a0c 100644 --- a/exasol_script_languages_container_ci/lib/github_release_asset_uploader.py +++ b/exasol_script_languages_container_ci/lib/github_release_asset_uploader.py @@ -1,29 +1,44 @@ import logging from pathlib import Path -from github import Github, GithubException +from github import ( + Github, + GithubException, +) -class GithubReleaseAssetUploader(object): +class GithubReleaseAssetUploader: """ Implements upload to a Github Release. See https://docs.github.com/en/rest/releases/assets#upload-a-release-asset for details. 
The access token needs to be stored in the environment variable GITHUB_TOKEN """ + def __init__(self, token): self._token = token - def upload(self, archive_path: str, label: str, repo_id: str, release_id: int, content_type: str): + def upload( + self, + archive_path: str, + label: str, + repo_id: str, + release_id: int, + content_type: str, + ): gh = Github(self._token) gh_repo = gh.get_repo(repo_id) release = gh_repo.get_release(release_id) # Check GH limitation # https://docs.github.com/en/repositories/releasing-projects-on-github/about-releases#storage-and-bandwidth-quotas - if Path(archive_path).stat().st_size >= 2 * (2 ** 30): + if Path(archive_path).stat().st_size >= 2 * (2**30): logging.error("File larger than 2GB. Skipping it...") else: try: - release.upload_asset(path=archive_path, label=label, content_type=content_type) + release.upload_asset( + path=archive_path, label=label, content_type=content_type + ) except GithubException as ex: - logging.error(f"Upload of asset {archive_path} to release {release_id} failed: {ex}") + logging.error( + f"Upload of asset {archive_path} to release {release_id} failed: {ex}" + ) raise ex diff --git a/exasol_script_languages_container_ci/lib/release.py b/exasol_script_languages_container_ci/lib/release.py index a45a244..af9ab6d 100644 --- a/exasol_script_languages_container_ci/lib/release.py +++ b/exasol_script_languages_container_ci/lib/release.py @@ -3,7 +3,9 @@ from pathlib import Path from typing import Callable -from exasol_integration_test_docker_environment.cli.options.system_options import DEFAULT_OUTPUT_DIRECTORY +from exasol_integration_test_docker_environment.cli.options.system_options import ( + DEFAULT_OUTPUT_DIRECTORY, +) from exasol_integration_test_docker_environment.lib.base import luigi_log_config from exasol_script_languages_container_ci.lib.ci_build import CIBuild @@ -15,20 +17,22 @@ from exasol_script_languages_container_ci.lib.release_uploader import ReleaseUploader -def release(flavor: str, - docker_user: str, - docker_password: str, - docker_release_repository: str, - build_config: Config, - source_repo_url: str, - release_id: int, - is_dry_run: bool, - release_uploader: ReleaseUploader, - ci_build: CIBuild = CIBuild(), - ci_execute_tests: CIExecuteTest = CIExecuteTest(), - ci_push: CIPush = CIPush(), - ci_security_scan: CISecurityScan = CISecurityScan(), - ci_prepare: CIPrepare = CIPrepare()): +def release( + flavor: str, + docker_user: str, + docker_password: str, + docker_release_repository: str, + build_config: Config, + source_repo_url: str, + release_id: int, + is_dry_run: bool, + release_uploader: ReleaseUploader, + ci_build: CIBuild = CIBuild(), + ci_execute_tests: CIExecuteTest = CIExecuteTest(), + ci_push: CIPush = CIPush(), + ci_security_scan: CISecurityScan = CISecurityScan(), + ci_prepare: CIPrepare = CIPrepare(), +): """ Run Release build: 1. 
Build image @@ -41,25 +45,32 @@ def release(flavor: str, flavor_path = (f"flavors/{flavor}",) test_container_folder = "test_container" ci_prepare.prepare() - ci_build.build(flavor_path=flavor_path, rebuild=True, - build_docker_repository=None, - commit_sha="", - docker_user=None, - docker_password=None, - test_container_folder=test_container_folder) - ci_execute_tests.execute_tests(flavor_path=flavor_path, - docker_user=docker_user, - docker_password=docker_password, - test_container_folder=test_container_folder) + ci_build.build( + flavor_path=flavor_path, + rebuild=True, + build_docker_repository=None, + commit_sha="", + docker_user=None, # type: ignore + docker_password=None, # type: ignore + test_container_folder=test_container_folder, + ) + ci_execute_tests.execute_tests( + flavor_path=flavor_path, + docker_user=docker_user, + docker_password=docker_password, + test_container_folder=test_container_folder, + ) ci_security_scan.run_security_scan(flavor_path=flavor_path) if not is_dry_run: - ci_push.push(flavor_path=flavor_path, - target_docker_repository=docker_release_repository, - target_docker_tag_prefix="", - docker_user=docker_user, - docker_password=docker_password) + ci_push.push( + flavor_path=flavor_path, + target_docker_repository=docker_release_repository, + target_docker_tag_prefix="", + docker_user=docker_user, + docker_password=docker_password, + ) else: logging.info("Skipping push to docker release repository due to dry-run.") - release_uploader.release_upload(flavor_path=flavor_path, - source_repo_url=source_repo_url, - release_id=release_id) + release_uploader.release_upload( + flavor_path=flavor_path, source_repo_url=source_repo_url, release_id=release_id + ) diff --git a/exasol_script_languages_container_ci/lib/release_uploader.py b/exasol_script_languages_container_ci/lib/release_uploader.py index c9d1d66..ca136c7 100644 --- a/exasol_script_languages_container_ci/lib/release_uploader.py +++ b/exasol_script_languages_container_ci/lib/release_uploader.py @@ -15,8 +15,10 @@ def _parse_repo_url(source_repo_url: str) -> str: """ res = re.search(r"^https://github.com/([a-zA-Z0-9\-_/]+)$", source_repo_url) if res is None: - raise ValueError(f"Parameter source_repo_url={source_repo_url} does not match the following regex: " - f"^https://github.com/([a-zA-Z0-9\-_/]+)$") + raise ValueError( + f"Parameter source_repo_url={source_repo_url} does not match the following regex: " + rf"^https://github.com/([a-zA-Z0-9\-_/]+)$" + ) return res.groups()[0] @@ -26,10 +28,9 @@ def __init__(self, asset_uploader: AssetUploader, ci_export: CIExport = CIExport self._ci_export = ci_export self._asset_uploader = asset_uploader - def release_upload(self, - flavor_path: Tuple[str, ...], - source_repo_url: str, - release_id: int) -> None: + def release_upload( + self, flavor_path: Tuple[str, ...], source_repo_url: str, release_id: int + ) -> None: """ Exports the container into tar.gz(s) and uploads to the repository / release. 
release_key is expected to have the following format: "{key}:{value}" where {key} can be: @@ -47,11 +48,13 @@ def release_upload(self, content_type="application/gzip", artifact_path=temp_dir, file_suffix=".tar.gz", - label_prefix="Flavor") + label_prefix="Flavor", + ) self._asset_uploader.upload_assets( repo_id=repo_id, release_id=release_id, content_type="text/plain", artifact_path=temp_dir, file_suffix=".tar.gz.sha512sum", - label_prefix="Checksum") + label_prefix="Checksum", + ) diff --git a/exasol_script_languages_container_ci/lib/render_template.py b/exasol_script_languages_container_ci/lib/render_template.py index 7b26bc8..41204e1 100644 --- a/exasol_script_languages_container_ci/lib/render_template.py +++ b/exasol_script_languages_container_ci/lib/render_template.py @@ -2,7 +2,10 @@ def render_template(template: str, **kwargs): - env = jinja2.Environment(loader=jinja2.PackageLoader("exasol_script_languages_container_ci"), - autoescape=jinja2.select_autoescape(), keep_trailing_newline=True) + env = jinja2.Environment( + loader=jinja2.PackageLoader("exasol_script_languages_container_ci"), + autoescape=jinja2.select_autoescape(), + keep_trailing_newline=True, + ) t = env.get_template(template) return t.render(**kwargs) diff --git a/exasol_script_languages_container_ci/main.py b/exasol_script_languages_container_ci/main.py index ddbdfb5..bfb017f 100755 --- a/exasol_script_languages_container_ci/main.py +++ b/exasol_script_languages_container_ci/main.py @@ -2,5 +2,5 @@ # from exasol_script_languages_container_ci.cli.cli import cli -if __name__ == '__main__': +if __name__ == "__main__": cli() diff --git a/exasol_script_languages_container_ci/version.py b/exasol_script_languages_container_ci/version.py new file mode 100644 index 0000000..d563c8e --- /dev/null +++ b/exasol_script_languages_container_ci/version.py @@ -0,0 +1,10 @@ +# ATTENTION: +# This file is generated by exasol/toolbox/pre_commit_hooks/package_version.py when using: +# * either "poetry run nox -s fix" +# * or "poetry run version-check --fix" +# Do not edit this file manually! +# If you need to change the version, do so in the project.toml, e.g. by using `poetry version X.Y.Z`. +MAJOR = 1 +MINOR = 6 +PATCH = 0 +VERSION = f"{MAJOR}.{MINOR}.{PATCH}" diff --git a/noxconfig.py b/noxconfig.py new file mode 100644 index 0000000..03c5fed --- /dev/null +++ b/noxconfig.py @@ -0,0 +1,20 @@ +from __future__ import annotations + +from dataclasses import dataclass +from pathlib import Path +from typing import Iterable + +from nox import Session + + +@dataclass(frozen=True) +class Config: + root: Path = Path(__file__).parent + doc: Path = Path(__file__).parent / "doc" + version_file: Path = ( + Path(__file__).parent / "exasol_script_languages_container_ci" / "version.py" + ) + path_filters: Iterable[str] = ("dist", ".eggs", "venv", "resources") + + +PROJECT_CONFIG = Config() diff --git a/noxfile.py b/noxfile.py new file mode 100644 index 0000000..0738e13 --- /dev/null +++ b/noxfile.py @@ -0,0 +1,7 @@ +import nox + +# imports all nox task provided by the toolbox +from exasol.toolbox.nox.tasks import * # type: ignore + +# default actions to be run if nothing is explicitly specified with the -s option +nox.options.sessions = ["fix"] diff --git a/poetry.lock b/poetry.lock index 5a2e341..07fb069 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,15 @@ -# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. 
+ +[[package]] +name = "alabaster" +version = "0.7.16" +description = "A light, configurable Sphinx theme" +optional = false +python-versions = ">=3.9" +files = [ + {file = "alabaster-0.7.16-py3-none-any.whl", hash = "sha256:b46733c07dce03ae4e150330b975c75737fa60f0a7c591b6c8bf4928a28e2c92"}, + {file = "alabaster-0.7.16.tar.gz", hash = "sha256:75a8b99c28a5dad50dd7f8ccdd447a121ddb3892da9e53d1ca5cca3106d58d65"}, +] [[package]] name = "annotated-types" @@ -13,13 +24,13 @@ files = [ [[package]] name = "anyio" -version = "4.4.0" +version = "4.6.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "anyio-4.4.0-py3-none-any.whl", hash = "sha256:c1b2d8f46a8a812513012e1107cb0e68c17159a7a594208005a57dc776e1bdc7"}, - {file = "anyio-4.4.0.tar.gz", hash = "sha256:5aadc6a1bbb7cdb0bede386cac5e2940f5e2ff3aa20277e991cf028e0585ce94"}, + {file = "anyio-4.6.0-py3-none-any.whl", hash = "sha256:c7d2e9d63e31599eeb636c8c5c03a7e108d73b345f064f1c19fdc87b79036a9a"}, + {file = "anyio-4.6.0.tar.gz", hash = "sha256:137b4559cbb034c477165047febb6ff83f390fc3b20bf181c1fc0a728cb8beeb"}, ] [package.dependencies] @@ -29,9 +40,9 @@ sniffio = ">=1.1" typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""} [package.extras] -doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] -test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] -trio = ["trio (>=0.23)"] +doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.21.0b1)"] +trio = ["trio (>=0.26.1)"] [[package]] name = "argcomplete" @@ -47,6 +58,20 @@ files = [ [package.extras] test = ["coverage", "mypy", "pexpect", "ruff", "wheel"] +[[package]] +name = "astroid" +version = "3.3.4" +description = "An abstract syntax tree for Python with inference support." 
+optional = false +python-versions = ">=3.9.0" +files = [ + {file = "astroid-3.3.4-py3-none-any.whl", hash = "sha256:5eba185467253501b62a9f113c263524b4f5d55e1b30456370eed4cdbd6438fd"}, + {file = "astroid-3.3.4.tar.gz", hash = "sha256:e73d0b62dd680a7c07cb2cd0ce3c22570b044dd01bd994bc3a2dd16c6cbba162"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""} + [[package]] name = "attrs" version = "24.2.0" @@ -66,6 +91,45 @@ docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphi tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] +[[package]] +name = "babel" +version = "2.16.0" +description = "Internationalization utilities" +optional = false +python-versions = ">=3.8" +files = [ + {file = "babel-2.16.0-py3-none-any.whl", hash = "sha256:368b5b98b37c06b7daf6696391c3240c938b37767d4584413e8438c5c435fa8b"}, + {file = "babel-2.16.0.tar.gz", hash = "sha256:d1f3554ca26605fe173f3de0c65f750f5a42f924499bf134de6423582298e316"}, +] + +[package.extras] +dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] + +[[package]] +name = "bandit" +version = "1.7.10" +description = "Security oriented static analyser for python code." +optional = false +python-versions = ">=3.8" +files = [ + {file = "bandit-1.7.10-py3-none-any.whl", hash = "sha256:665721d7bebbb4485a339c55161ac0eedde27d51e638000d91c8c2d68343ad02"}, + {file = "bandit-1.7.10.tar.gz", hash = "sha256:59ed5caf5d92b6ada4bf65bc6437feea4a9da1093384445fed4d472acc6cff7b"}, +] + +[package.dependencies] +colorama = {version = ">=0.3.9", markers = "platform_system == \"Windows\""} +PyYAML = ">=5.3.1" +rich = "*" +stevedore = ">=1.20.0" +tomli = {version = ">=1.1.0", optional = true, markers = "python_version < \"3.11\" and extra == \"toml\""} + +[package.extras] +baseline = ["GitPython (>=3.1.30)"] +sarif = ["jschema-to-python (>=1.2.3)", "sarif-om (>=1.0.4)"] +test = ["beautifulsoup4 (>=4.8.0)", "coverage (>=4.5.4)", "fixtures (>=3.0.0)", "flake8 (>=4.0.0)", "pylint (==1.9.4)", "stestr (>=2.5.0)", "testscenarios (>=0.5.0)", "testtools (>=2.3.0)"] +toml = ["tomli (>=1.1.0)"] +yaml = ["PyYAML"] + [[package]] name = "bcrypt" version = "4.2.0" @@ -106,6 +170,27 @@ files = [ tests = ["pytest (>=3.2.1,!=3.3.0)"] typecheck = ["mypy"] +[[package]] +name = "beautifulsoup4" +version = "4.12.3" +description = "Screen-scraping library" +optional = false +python-versions = ">=3.6.0" +files = [ + {file = "beautifulsoup4-4.12.3-py3-none-any.whl", hash = "sha256:b80878c9f40111313e55da8ba20bdba06d8fa3969fc68304167741bbf9e082ed"}, + {file = "beautifulsoup4-4.12.3.tar.gz", hash = "sha256:74e3d1928edc070d21748185c46e3fb33490f22f52a3addee9aee0f4f7781051"}, +] + +[package.dependencies] +soupsieve = ">1.2" + +[package.extras] +cchardet = ["cchardet"] +chardet = ["chardet"] +charset-normalizer = ["charset-normalizer"] +html5lib = ["html5lib"] +lxml = ["lxml"] + [[package]] name = "black" version = "24.8.0" @@ -165,83 +250,94 @@ files = [ [[package]] name = "cffi" -version = "1.17.0" +version = "1.17.1" description = "Foreign Function Interface for Python calling C code." 
optional = false python-versions = ">=3.8" files = [ - {file = "cffi-1.17.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f9338cc05451f1942d0d8203ec2c346c830f8e86469903d5126c1f0a13a2bcbb"}, - {file = "cffi-1.17.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a0ce71725cacc9ebf839630772b07eeec220cbb5f03be1399e0457a1464f8e1a"}, - {file = "cffi-1.17.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c815270206f983309915a6844fe994b2fa47e5d05c4c4cef267c3b30e34dbe42"}, - {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d6bdcd415ba87846fd317bee0774e412e8792832e7805938987e4ede1d13046d"}, - {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a98748ed1a1df4ee1d6f927e151ed6c1a09d5ec21684de879c7ea6aa96f58f2"}, - {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0a048d4f6630113e54bb4b77e315e1ba32a5a31512c31a273807d0027a7e69ab"}, - {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24aa705a5f5bd3a8bcfa4d123f03413de5d86e497435693b638cbffb7d5d8a1b"}, - {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:856bf0924d24e7f93b8aee12a3a1095c34085600aa805693fb7f5d1962393206"}, - {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:4304d4416ff032ed50ad6bb87416d802e67139e31c0bde4628f36a47a3164bfa"}, - {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:331ad15c39c9fe9186ceaf87203a9ecf5ae0ba2538c9e898e3a6967e8ad3db6f"}, - {file = "cffi-1.17.0-cp310-cp310-win32.whl", hash = "sha256:669b29a9eca6146465cc574659058ed949748f0809a2582d1f1a324eb91054dc"}, - {file = "cffi-1.17.0-cp310-cp310-win_amd64.whl", hash = "sha256:48b389b1fd5144603d61d752afd7167dfd205973a43151ae5045b35793232aa2"}, - {file = "cffi-1.17.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c5d97162c196ce54af6700949ddf9409e9833ef1003b4741c2b39ef46f1d9720"}, - {file = "cffi-1.17.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5ba5c243f4004c750836f81606a9fcb7841f8874ad8f3bf204ff5e56332b72b9"}, - {file = "cffi-1.17.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bb9333f58fc3a2296fb1d54576138d4cf5d496a2cc118422bd77835e6ae0b9cb"}, - {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:435a22d00ec7d7ea533db494da8581b05977f9c37338c80bc86314bec2619424"}, - {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d1df34588123fcc88c872f5acb6f74ae59e9d182a2707097f9e28275ec26a12d"}, - {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:df8bb0010fdd0a743b7542589223a2816bdde4d94bb5ad67884348fa2c1c67e8"}, - {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8b5b9712783415695663bd463990e2f00c6750562e6ad1d28e072a611c5f2a6"}, - {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ffef8fd58a36fb5f1196919638f73dd3ae0db1a878982b27a9a5a176ede4ba91"}, - {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e67d26532bfd8b7f7c05d5a766d6f437b362c1bf203a3a5ce3593a645e870b8"}, - {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:45f7cd36186db767d803b1473b3c659d57a23b5fa491ad83c6d40f2af58e4dbb"}, - {file = "cffi-1.17.0-cp311-cp311-win32.whl", hash = 
"sha256:a9015f5b8af1bb6837a3fcb0cdf3b874fe3385ff6274e8b7925d81ccaec3c5c9"}, - {file = "cffi-1.17.0-cp311-cp311-win_amd64.whl", hash = "sha256:b50aaac7d05c2c26dfd50c3321199f019ba76bb650e346a6ef3616306eed67b0"}, - {file = "cffi-1.17.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aec510255ce690d240f7cb23d7114f6b351c733a74c279a84def763660a2c3bc"}, - {file = "cffi-1.17.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2770bb0d5e3cc0e31e7318db06efcbcdb7b31bcb1a70086d3177692a02256f59"}, - {file = "cffi-1.17.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db9a30ec064129d605d0f1aedc93e00894b9334ec74ba9c6bdd08147434b33eb"}, - {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a47eef975d2b8b721775a0fa286f50eab535b9d56c70a6e62842134cf7841195"}, - {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f3e0992f23bbb0be00a921eae5363329253c3b86287db27092461c887b791e5e"}, - {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6107e445faf057c118d5050560695e46d272e5301feffda3c41849641222a828"}, - {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb862356ee9391dc5a0b3cbc00f416b48c1b9a52d252d898e5b7696a5f9fe150"}, - {file = "cffi-1.17.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c1c13185b90bbd3f8b5963cd8ce7ad4ff441924c31e23c975cb150e27c2bf67a"}, - {file = "cffi-1.17.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:17c6d6d3260c7f2d94f657e6872591fe8733872a86ed1345bda872cfc8c74885"}, - {file = "cffi-1.17.0-cp312-cp312-win32.whl", hash = "sha256:c3b8bd3133cd50f6b637bb4322822c94c5ce4bf0d724ed5ae70afce62187c492"}, - {file = "cffi-1.17.0-cp312-cp312-win_amd64.whl", hash = "sha256:dca802c8db0720ce1c49cce1149ff7b06e91ba15fa84b1d59144fef1a1bc7ac2"}, - {file = "cffi-1.17.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:6ce01337d23884b21c03869d2f68c5523d43174d4fc405490eb0091057943118"}, - {file = "cffi-1.17.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cab2eba3830bf4f6d91e2d6718e0e1c14a2f5ad1af68a89d24ace0c6b17cced7"}, - {file = "cffi-1.17.0-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:14b9cbc8f7ac98a739558eb86fabc283d4d564dafed50216e7f7ee62d0d25377"}, - {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b00e7bcd71caa0282cbe3c90966f738e2db91e64092a877c3ff7f19a1628fdcb"}, - {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:41f4915e09218744d8bae14759f983e466ab69b178de38066f7579892ff2a555"}, - {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4760a68cab57bfaa628938e9c2971137e05ce48e762a9cb53b76c9b569f1204"}, - {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:011aff3524d578a9412c8b3cfaa50f2c0bd78e03eb7af7aa5e0df59b158efb2f"}, - {file = "cffi-1.17.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:a003ac9edc22d99ae1286b0875c460351f4e101f8c9d9d2576e78d7e048f64e0"}, - {file = "cffi-1.17.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ef9528915df81b8f4c7612b19b8628214c65c9b7f74db2e34a646a0a2a0da2d4"}, - {file = "cffi-1.17.0-cp313-cp313-win32.whl", hash = "sha256:70d2aa9fb00cf52034feac4b913181a6e10356019b18ef89bc7c12a283bf5f5a"}, - {file = "cffi-1.17.0-cp313-cp313-win_amd64.whl", 
hash = "sha256:b7b6ea9e36d32582cda3465f54c4b454f62f23cb083ebc7a94e2ca6ef011c3a7"}, - {file = "cffi-1.17.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:964823b2fc77b55355999ade496c54dde161c621cb1f6eac61dc30ed1b63cd4c"}, - {file = "cffi-1.17.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:516a405f174fd3b88829eabfe4bb296ac602d6a0f68e0d64d5ac9456194a5b7e"}, - {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dec6b307ce928e8e112a6bb9921a1cb00a0e14979bf28b98e084a4b8a742bd9b"}, - {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e4094c7b464cf0a858e75cd14b03509e84789abf7b79f8537e6a72152109c76e"}, - {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2404f3de742f47cb62d023f0ba7c5a916c9c653d5b368cc966382ae4e57da401"}, - {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3aa9d43b02a0c681f0bfbc12d476d47b2b2b6a3f9287f11ee42989a268a1833c"}, - {file = "cffi-1.17.0-cp38-cp38-win32.whl", hash = "sha256:0bb15e7acf8ab35ca8b24b90af52c8b391690ef5c4aec3d31f38f0d37d2cc499"}, - {file = "cffi-1.17.0-cp38-cp38-win_amd64.whl", hash = "sha256:93a7350f6706b31f457c1457d3a3259ff9071a66f312ae64dc024f049055f72c"}, - {file = "cffi-1.17.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1a2ddbac59dc3716bc79f27906c010406155031a1c801410f1bafff17ea304d2"}, - {file = "cffi-1.17.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6327b572f5770293fc062a7ec04160e89741e8552bf1c358d1a23eba68166759"}, - {file = "cffi-1.17.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbc183e7bef690c9abe5ea67b7b60fdbca81aa8da43468287dae7b5c046107d4"}, - {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5bdc0f1f610d067c70aa3737ed06e2726fd9d6f7bfee4a351f4c40b6831f4e82"}, - {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6d872186c1617d143969defeadac5a904e6e374183e07977eedef9c07c8953bf"}, - {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0d46ee4764b88b91f16661a8befc6bfb24806d885e27436fdc292ed7e6f6d058"}, - {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f76a90c345796c01d85e6332e81cab6d70de83b829cf1d9762d0a3da59c7932"}, - {file = "cffi-1.17.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0e60821d312f99d3e1569202518dddf10ae547e799d75aef3bca3a2d9e8ee693"}, - {file = "cffi-1.17.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:eb09b82377233b902d4c3fbeeb7ad731cdab579c6c6fda1f763cd779139e47c3"}, - {file = "cffi-1.17.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:24658baf6224d8f280e827f0a50c46ad819ec8ba380a42448e24459daf809cf4"}, - {file = "cffi-1.17.0-cp39-cp39-win32.whl", hash = "sha256:0fdacad9e0d9fc23e519efd5ea24a70348305e8d7d85ecbb1a5fa66dc834e7fb"}, - {file = "cffi-1.17.0-cp39-cp39-win_amd64.whl", hash = "sha256:7cbc78dc018596315d4e7841c8c3a7ae31cc4d638c9b627f87d52e8abaaf2d29"}, - {file = "cffi-1.17.0.tar.gz", hash = "sha256:f3157624b7558b914cb039fd1af735e5e8049a87c817cc215109ad1c8779df76"}, + {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, + {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be"}, + {file = "cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c"}, + {file = "cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b"}, + {file = "cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655"}, + {file = "cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0"}, + {file = 
"cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8"}, + {file = "cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65"}, + {file = "cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9"}, + {file = "cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d"}, + {file = "cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a"}, + {file = "cffi-1.17.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1"}, + {file = "cffi-1.17.1-cp38-cp38-win32.whl", hash = "sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8"}, + {file = "cffi-1.17.1-cp38-cp38-win_amd64.whl", hash = "sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e"}, + {file = "cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7"}, + {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"}, + {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"}, ] [package.dependencies] pycparser = "*" +[[package]] +name = "cfgv" +version = "3.4.0" +description = "Validate configuration and produce human readable error messages." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"}, + {file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"}, +] + [[package]] name = "charset-normalizer" version = "3.3.2" @@ -366,6 +462,107 @@ files = [ {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] +[[package]] +name = "colorlog" +version = "6.8.2" +description = "Add colours to the output of Python's logging module." +optional = false +python-versions = ">=3.6" +files = [ + {file = "colorlog-6.8.2-py3-none-any.whl", hash = "sha256:4dcbb62368e2800cb3c5abd348da7e53f6c362dda502ec27c560b2e58a66bd33"}, + {file = "colorlog-6.8.2.tar.gz", hash = "sha256:3e3e079a41feb5a1b64f978b5ea4f46040a94f11f0e8bbb8261e3dbbeca64d44"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} + +[package.extras] +development = ["black", "flake8", "mypy", "pytest", "types-colorama"] + +[[package]] +name = "coverage" +version = "7.6.1" +description = "Code coverage measurement for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "coverage-7.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b06079abebbc0e89e6163b8e8f0e16270124c154dc6e4a47b413dd538859af16"}, + {file = "coverage-7.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cf4b19715bccd7ee27b6b120e7e9dd56037b9c0681dcc1adc9ba9db3d417fa36"}, + {file = "coverage-7.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61c0abb4c85b095a784ef23fdd4aede7a2628478e7baba7c5e3deba61070a02"}, + {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd21f6ae3f08b41004dfb433fa895d858f3f5979e7762d052b12aef444e29afc"}, + {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f59d57baca39b32db42b83b2a7ba6f47ad9c394ec2076b084c3f029b7afca23"}, + {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a1ac0ae2b8bd743b88ed0502544847c3053d7171a3cff9228af618a068ed9c34"}, + {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e6a08c0be454c3b3beb105c0596ebdc2371fab6bb90c0c0297f4e58fd7e1012c"}, + {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f5796e664fe802da4f57a168c85359a8fbf3eab5e55cd4e4569fbacecc903959"}, + {file = "coverage-7.6.1-cp310-cp310-win32.whl", hash = "sha256:7bb65125fcbef8d989fa1dd0e8a060999497629ca5b0efbca209588a73356232"}, + {file = "coverage-7.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:3115a95daa9bdba70aea750db7b96b37259a81a709223c8448fa97727d546fe0"}, + {file = "coverage-7.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7dea0889685db8550f839fa202744652e87c60015029ce3f60e006f8c4462c93"}, + {file = "coverage-7.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed37bd3c3b063412f7620464a9ac1314d33100329f39799255fb8d3027da50d3"}, + {file = "coverage-7.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d85f5e9a5f8b73e2350097c3756ef7e785f55bd71205defa0bfdaf96c31616ff"}, + {file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bc572be474cafb617672c43fe989d6e48d3c83af02ce8de73fff1c6bb3c198d"}, + {file = 
"coverage-7.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c0420b573964c760df9e9e86d1a9a622d0d27f417e1a949a8a66dd7bcee7bc6"}, + {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1f4aa8219db826ce6be7099d559f8ec311549bfc4046f7f9fe9b5cea5c581c56"}, + {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:fc5a77d0c516700ebad189b587de289a20a78324bc54baee03dd486f0855d234"}, + {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b48f312cca9621272ae49008c7f613337c53fadca647d6384cc129d2996d1133"}, + {file = "coverage-7.6.1-cp311-cp311-win32.whl", hash = "sha256:1125ca0e5fd475cbbba3bb67ae20bd2c23a98fac4e32412883f9bcbaa81c314c"}, + {file = "coverage-7.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:8ae539519c4c040c5ffd0632784e21b2f03fc1340752af711f33e5be83a9d6c6"}, + {file = "coverage-7.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:95cae0efeb032af8458fc27d191f85d1717b1d4e49f7cb226cf526ff28179778"}, + {file = "coverage-7.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5621a9175cf9d0b0c84c2ef2b12e9f5f5071357c4d2ea6ca1cf01814f45d2391"}, + {file = "coverage-7.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:260933720fdcd75340e7dbe9060655aff3af1f0c5d20f46b57f262ab6c86a5e8"}, + {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07e2ca0ad381b91350c0ed49d52699b625aab2b44b65e1b4e02fa9df0e92ad2d"}, + {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c44fee9975f04b33331cb8eb272827111efc8930cfd582e0320613263ca849ca"}, + {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:877abb17e6339d96bf08e7a622d05095e72b71f8afd8a9fefc82cf30ed944163"}, + {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3e0cadcf6733c09154b461f1ca72d5416635e5e4ec4e536192180d34ec160f8a"}, + {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3c02d12f837d9683e5ab2f3d9844dc57655b92c74e286c262e0fc54213c216d"}, + {file = "coverage-7.6.1-cp312-cp312-win32.whl", hash = "sha256:e05882b70b87a18d937ca6768ff33cc3f72847cbc4de4491c8e73880766718e5"}, + {file = "coverage-7.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:b5d7b556859dd85f3a541db6a4e0167b86e7273e1cdc973e5b175166bb634fdb"}, + {file = "coverage-7.6.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a4acd025ecc06185ba2b801f2de85546e0b8ac787cf9d3b06e7e2a69f925b106"}, + {file = "coverage-7.6.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a6d3adcf24b624a7b778533480e32434a39ad8fa30c315208f6d3e5542aeb6e9"}, + {file = "coverage-7.6.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0c212c49b6c10e6951362f7c6df3329f04c2b1c28499563d4035d964ab8e08c"}, + {file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e81d7a3e58882450ec4186ca59a3f20a5d4440f25b1cff6f0902ad890e6748a"}, + {file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78b260de9790fd81e69401c2dc8b17da47c8038176a79092a89cb2b7d945d060"}, + {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a78d169acd38300060b28d600344a803628c3fd585c912cacc9ea8790fe96862"}, + {file = 
"coverage-7.6.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2c09f4ce52cb99dd7505cd0fc8e0e37c77b87f46bc9c1eb03fe3bc9991085388"}, + {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6878ef48d4227aace338d88c48738a4258213cd7b74fd9a3d4d7582bb1d8a155"}, + {file = "coverage-7.6.1-cp313-cp313-win32.whl", hash = "sha256:44df346d5215a8c0e360307d46ffaabe0f5d3502c8a1cefd700b34baf31d411a"}, + {file = "coverage-7.6.1-cp313-cp313-win_amd64.whl", hash = "sha256:8284cf8c0dd272a247bc154eb6c95548722dce90d098c17a883ed36e67cdb129"}, + {file = "coverage-7.6.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:d3296782ca4eab572a1a4eca686d8bfb00226300dcefdf43faa25b5242ab8a3e"}, + {file = "coverage-7.6.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:502753043567491d3ff6d08629270127e0c31d4184c4c8d98f92c26f65019962"}, + {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a89ecca80709d4076b95f89f308544ec8f7b4727e8a547913a35f16717856cb"}, + {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a318d68e92e80af8b00fa99609796fdbcdfef3629c77c6283566c6f02c6d6704"}, + {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13b0a73a0896988f053e4fbb7de6d93388e6dd292b0d87ee51d106f2c11b465b"}, + {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4421712dbfc5562150f7554f13dde997a2e932a6b5f352edcce948a815efee6f"}, + {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:166811d20dfea725e2e4baa71fffd6c968a958577848d2131f39b60043400223"}, + {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:225667980479a17db1048cb2bf8bfb39b8e5be8f164b8f6628b64f78a72cf9d3"}, + {file = "coverage-7.6.1-cp313-cp313t-win32.whl", hash = "sha256:170d444ab405852903b7d04ea9ae9b98f98ab6d7e63e1115e82620807519797f"}, + {file = "coverage-7.6.1-cp313-cp313t-win_amd64.whl", hash = "sha256:b9f222de8cded79c49bf184bdbc06630d4c58eec9459b939b4a690c82ed05657"}, + {file = "coverage-7.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6db04803b6c7291985a761004e9060b2bca08da6d04f26a7f2294b8623a0c1a0"}, + {file = "coverage-7.6.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f1adfc8ac319e1a348af294106bc6a8458a0f1633cc62a1446aebc30c5fa186a"}, + {file = "coverage-7.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a95324a9de9650a729239daea117df21f4b9868ce32e63f8b650ebe6cef5595b"}, + {file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b43c03669dc4618ec25270b06ecd3ee4fa94c7f9b3c14bae6571ca00ef98b0d3"}, + {file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8929543a7192c13d177b770008bc4e8119f2e1f881d563fc6b6305d2d0ebe9de"}, + {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:a09ece4a69cf399510c8ab25e0950d9cf2b42f7b3cb0374f95d2e2ff594478a6"}, + {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:9054a0754de38d9dbd01a46621636689124d666bad1936d76c0341f7d71bf569"}, + {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0dbde0f4aa9a16fa4d754356a8f2e36296ff4d83994b2c9d8398aa32f222f989"}, + {file = "coverage-7.6.1-cp38-cp38-win32.whl", hash = 
"sha256:da511e6ad4f7323ee5702e6633085fb76c2f893aaf8ce4c51a0ba4fc07580ea7"}, + {file = "coverage-7.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:3f1156e3e8f2872197af3840d8ad307a9dd18e615dc64d9ee41696f287c57ad8"}, + {file = "coverage-7.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:abd5fd0db5f4dc9289408aaf34908072f805ff7792632250dcb36dc591d24255"}, + {file = "coverage-7.6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:547f45fa1a93154bd82050a7f3cddbc1a7a4dd2a9bf5cb7d06f4ae29fe94eaf8"}, + {file = "coverage-7.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:645786266c8f18a931b65bfcefdbf6952dd0dea98feee39bd188607a9d307ed2"}, + {file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9e0b2df163b8ed01d515807af24f63de04bebcecbd6c3bfeff88385789fdf75a"}, + {file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:609b06f178fe8e9f89ef676532760ec0b4deea15e9969bf754b37f7c40326dbc"}, + {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:702855feff378050ae4f741045e19a32d57d19f3e0676d589df0575008ea5004"}, + {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:2bdb062ea438f22d99cba0d7829c2ef0af1d768d1e4a4f528087224c90b132cb"}, + {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9c56863d44bd1c4fe2abb8a4d6f5371d197f1ac0ebdee542f07f35895fc07f36"}, + {file = "coverage-7.6.1-cp39-cp39-win32.whl", hash = "sha256:6e2cd258d7d927d09493c8df1ce9174ad01b381d4729a9d8d4e38670ca24774c"}, + {file = "coverage-7.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:06a737c882bd26d0d6ee7269b20b12f14a8704807a01056c80bb881a4b2ce6ca"}, + {file = "coverage-7.6.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:e9a6e0eb86070e8ccaedfbd9d38fec54864f3125ab95419970575b42af7541df"}, + {file = "coverage-7.6.1.tar.gz", hash = "sha256:953510dfb7b12ab69d20135a0662397f077c59b1e6379a768e97c59d852ee51d"}, +] + +[package.extras] +toml = ["tomli"] + [[package]] name = "cryptography" version = "43.0.1" @@ -417,13 +614,13 @@ test-randomorder = ["pytest-randomly"] [[package]] name = "datamodel-code-generator" -version = "0.26.0" +version = "0.26.1" description = "Datamodel Code Generator" optional = false python-versions = "<4.0,>=3.8" files = [ - {file = "datamodel_code_generator-0.26.0-py3-none-any.whl", hash = "sha256:29af7c1991d6b346d4f6cdf2f11b74c09d84c73b1ed7229adc4f64316dfbb48d"}, - {file = "datamodel_code_generator-0.26.0.tar.gz", hash = "sha256:5d1db3dd4d31ee8bb70426d8705f5b059bea15e0dd79e015a28373b5da98ff00"}, + {file = "datamodel_code_generator-0.26.1-py3-none-any.whl", hash = "sha256:bbe8a6cc0b9cfdbfd294e336e02b4c50b481ffc3b3c608b5578b6d7aa02cc8ae"}, + {file = "datamodel_code_generator-0.26.1.tar.gz", hash = "sha256:3b7b49c4230fa197ca28847e1e8996cd664638a7e91796c826a61c60d4ccd8a2"}, ] [package.dependencies] @@ -435,9 +632,9 @@ isort = ">=4.3.21,<6.0" jinja2 = ">=2.10.1,<4.0" packaging = "*" pydantic = [ + {version = ">=1.9.0,<2.4.0 || >2.4.0,<3.0", extras = ["email"], markers = "python_version >= \"3.10\" and python_version < \"3.11\""}, {version = ">=1.10.0,<2.0.0 || >2.0.0,<2.0.1 || >2.0.1,<2.4.0 || >2.4.0,<3.0", extras = ["email"], markers = "python_version >= \"3.12\" and python_version < \"4.0\""}, {version = ">=1.10.0,<2.4.0 || >2.4.0,<3.0", extras = ["email"], markers = "python_version >= \"3.11\" and python_version < \"3.12\""}, - {version = ">=1.9.0,<2.4.0 || >2.4.0,<3.0", extras 
= ["email"], markers = "python_version >= \"3.10\" and python_version < \"3.11\""}, ] pyyaml = ">=6.0.1" toml = {version = ">=0.10.0,<1.0.0", markers = "python_version < \"3.11\""} @@ -476,6 +673,32 @@ wrapt = ">=1.10,<2" [package.extras] dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] +[[package]] +name = "dill" +version = "0.3.9" +description = "serialize all of Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "dill-0.3.9-py3-none-any.whl", hash = "sha256:468dff3b89520b474c0397703366b7b95eebe6303f108adf9b19da1f702be87a"}, + {file = "dill-0.3.9.tar.gz", hash = "sha256:81aa267dddf68cbfe8029c42ca9ec6a4ab3b22371d1c450abc54422577b4512c"}, +] + +[package.extras] +graph = ["objgraph (>=1.7.2)"] +profile = ["gprof2dot (>=2022.7.29)"] + +[[package]] +name = "distlib" +version = "0.3.8" +description = "Distribution utilities" +optional = false +python-versions = "*" +files = [ + {file = "distlib-0.3.8-py2.py3-none-any.whl", hash = "sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784"}, + {file = "distlib-0.3.8.tar.gz", hash = "sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64"}, +] + [[package]] name = "dnspython" version = "2.6.1" @@ -575,13 +798,13 @@ files = [ [[package]] name = "exasol-integration-test-docker-environment" -version = "3.1.0" +version = "3.2.0" description = "Integration Test Docker Environment for Exasol" optional = false python-versions = "<4,>=3.8" files = [ - {file = "exasol_integration_test_docker_environment-3.1.0-py3-none-any.whl", hash = "sha256:ce3cf917bd660ef5e1c59281ba0c1ea7c7dc3c43c9454040a6e1a3d90124a1e4"}, - {file = "exasol_integration_test_docker_environment-3.1.0.tar.gz", hash = "sha256:dbcbb967bb3458f74c8764a1cfc4f62ab215b79ee754bb4d17d4ef54c74f7f0a"}, + {file = "exasol_integration_test_docker_environment-3.2.0-py3-none-any.whl", hash = "sha256:b0fc41a70b73ec5ad43171e2c8fcf76a54a1eb31befa09e6a3214af55f1c93fc"}, + {file = "exasol_integration_test_docker_environment-3.2.0.tar.gz", hash = "sha256:47b11dde66be0149d54cfa6d7eebc4665bd996fccb22f64ce6489a07cec7b331"}, ] [package.dependencies] @@ -607,13 +830,13 @@ simplejson = ">=3.16.0" [[package]] name = "exasol-saas-api" -version = "0.9.0" +version = "0.10.0" description = "API enabling Python applications connecting to Exasol database SaaS instances and using their SaaS services" optional = false python-versions = "<4.0.0,>=3.10.0" files = [ - {file = "exasol_saas_api-0.9.0-py3-none-any.whl", hash = "sha256:86b350ebc7257cf0e2f532e0fa0683ed53347ec7a96f7410a952d49ccc038aa0"}, - {file = "exasol_saas_api-0.9.0.tar.gz", hash = "sha256:0a6f051dd508b41eec66cefcf57369ccace04a8bc2e546a588202fa3a50dfee8"}, + {file = "exasol_saas_api-0.10.0-py3-none-any.whl", hash = "sha256:13ad9f62e37e95e0359da875d44369c38c511b72b374cdc46bfffe0de2d3f730"}, + {file = "exasol_saas_api-0.10.0.tar.gz", hash = "sha256:8e7febf0c1eb777c38f5631cfb8d45c82a63b463a1e43134117c7fde508a8504"}, ] [package.dependencies] @@ -643,6 +866,40 @@ importlib_metadata = ">=4.6.0" importlib-resources = ">=6.4.0" networkx = ">=3.3.0,<4.0.0" +[[package]] +name = "exasol-toolbox" +version = "0.15.0" +description = "Your one-stop solution for managing all standard tasks and core workflows of your Python project." 
+optional = false +python-versions = "<4.0,>=3.8" +files = [ + {file = "exasol_toolbox-0.15.0-py3-none-any.whl", hash = "sha256:26d0475a53b868a568ca3b58c50bc2776867d2c6155cca14aae75fac4c7efcfa"}, + {file = "exasol_toolbox-0.15.0.tar.gz", hash = "sha256:14260470633abadb6e4bd3aed49250bfcdef4f14cd09b5878dd42d43770b8c78"}, +] + +[package.dependencies] +bandit = {version = ">=1.7.9,<2.0.0", extras = ["toml"]} +black = ">=24.1.0" +coverage = ">=6.4.4,<8.0.0" +furo = ">=2022.9.15" +importlib-resources = ">=5.12.0" +isort = ">=5.12.0,<6.0.0" +mypy = ">=0.971" +myst-parser = ">=2.0.0,<4" +nox = ">=2022.8.7" +pluggy = ">=1.5.0,<2.0.0" +pre-commit = ">=3.1.1,<4" +prysk = {version = ">0.17.0,<1", extras = ["pytest-plugin"]} +pylint = ">=2.15.4" +pytest = ">=7.2.2,<9" +pyupgrade = ">=2.38.2,<4.0.0" +shibuya = ">=2024.5.14" +sphinx = ">=5.3,<8" +sphinx-copybutton = ">=0.5.0,<0.6.0" +sphinx-design = ">=0.5.0,<1" +sphinx-inline-tabs = ">=2023.4.21,<2024.0.0" +typer = {version = ">=0.7.0", extras = ["all"]} + [[package]] name = "exceptiongroup" version = "1.2.2" @@ -677,6 +934,39 @@ paramiko = ">=2.4" [package.extras] pytest = ["pytest (>=7)"] +[[package]] +name = "filelock" +version = "3.16.1" +description = "A platform independent file lock." +optional = false +python-versions = ">=3.8" +files = [ + {file = "filelock-3.16.1-py3-none-any.whl", hash = "sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0"}, + {file = "filelock-3.16.1.tar.gz", hash = "sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435"}, +] + +[package.extras] +docs = ["furo (>=2024.8.6)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4.1)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.6.1)", "diff-cover (>=9.2)", "pytest (>=8.3.3)", "pytest-asyncio (>=0.24)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.26.4)"] +typing = ["typing-extensions (>=4.12.2)"] + +[[package]] +name = "furo" +version = "2024.8.6" +description = "A clean customisable Sphinx documentation theme." +optional = false +python-versions = ">=3.8" +files = [ + {file = "furo-2024.8.6-py3-none-any.whl", hash = "sha256:6cd97c58b47813d3619e63e9081169880fbe331f0ca883c871ff1f3f11814f5c"}, + {file = "furo-2024.8.6.tar.gz", hash = "sha256:b63e4cee8abfc3136d3bc03a3d45a76a850bada4d6374d24c1716b0e01394a01"}, +] + +[package.dependencies] +beautifulsoup4 = "*" +pygments = ">=2.7" +sphinx = ">=6.0,<9.0" +sphinx-basic-ng = ">=1.0.0.beta2" + [[package]] name = "genson" version = "1.3.0" @@ -733,13 +1023,13 @@ files = [ [[package]] name = "httpcore" -version = "1.0.5" +version = "1.0.6" description = "A minimal low-level HTTP client." 
optional = false python-versions = ">=3.8" files = [ - {file = "httpcore-1.0.5-py3-none-any.whl", hash = "sha256:421f18bac248b25d310f3cacd198d55b8e6125c107797b609ff9b7a6ba7991b5"}, - {file = "httpcore-1.0.5.tar.gz", hash = "sha256:34a38e2f9291467ee3b44e89dd52615370e152954ba21721378a87b2960f7a61"}, + {file = "httpcore-1.0.6-py3-none-any.whl", hash = "sha256:27b59625743b85577a8c0e10e55b50b5368a4f2cfe8cc7bcfa9cf00829c2682f"}, + {file = "httpcore-1.0.6.tar.gz", hash = "sha256:73f6dbd6eb8c21bbf7ef8efad555481853f5f6acdeaff1edb0694289269ee17f"}, ] [package.dependencies] @@ -750,7 +1040,7 @@ h11 = ">=0.13,<0.15" asyncio = ["anyio (>=4.0,<5.0)"] http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] -trio = ["trio (>=0.22.0,<0.26.0)"] +trio = ["trio (>=0.22.0,<1.0)"] [[package]] name = "httpx" @@ -791,17 +1081,34 @@ files = [ [package.dependencies] pyreadline3 = {version = "*", markers = "sys_platform == \"win32\" and python_version >= \"3.8\""} +[[package]] +name = "identify" +version = "2.6.1" +description = "File identification library for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "identify-2.6.1-py2.py3-none-any.whl", hash = "sha256:53863bcac7caf8d2ed85bd20312ea5dcfc22226800f6d6881f232d861db5a8f0"}, + {file = "identify-2.6.1.tar.gz", hash = "sha256:91478c5fb7c3aac5ff7bf9b4344f803843dc586832d5f110d672b19aa1984c98"}, +] + +[package.extras] +license = ["ukkonen"] + [[package]] name = "idna" -version = "3.8" +version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.6" files = [ - {file = "idna-3.8-py3-none-any.whl", hash = "sha256:050b4e5baadcd44d760cedbd2b8e639f2ff89bbc7a5730fcc662954303377aac"}, - {file = "idna-3.8.tar.gz", hash = "sha256:d838c2c0ed6fced7693d5e8ab8e734d5f8fda53a039c0164afb0b82e771e3603"}, + {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, + {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, ] +[package.extras] +all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] + [[package]] name = "ifaddr" version = "0.2.0" @@ -813,34 +1120,49 @@ files = [ {file = "ifaddr-0.2.0.tar.gz", hash = "sha256:cc0cbfcaabf765d44595825fb96a99bb12c79716b73b44330ea38ee2b0c4aed4"}, ] +[[package]] +name = "imagesize" +version = "1.4.1" +description = "Getting image size from png/jpeg/jpeg2000/gif file" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"}, + {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, +] + [[package]] name = "importlib-metadata" -version = "8.4.0" +version = "8.5.0" description = "Read metadata from Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "importlib_metadata-8.4.0-py3-none-any.whl", hash = "sha256:66f342cc6ac9818fc6ff340576acd24d65ba0b3efabb2b4ac08b598965a4a2f1"}, - {file = "importlib_metadata-8.4.0.tar.gz", hash = "sha256:9a547d3bc3608b025f93d403fdd1aae741c24fbb8314df4b155675742ce303c5"}, + {file = "importlib_metadata-8.5.0-py3-none-any.whl", hash = "sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b"}, + {file = "importlib_metadata-8.5.0.tar.gz", hash = "sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7"}, ] 
[package.dependencies] -zipp = ">=0.5" +zipp = ">=3.20" [package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] perf = ["ipython"] -test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] +test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] +type = ["pytest-mypy"] [[package]] name = "importlib-resources" -version = "6.4.4" +version = "6.4.5" description = "Read resources from Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "importlib_resources-6.4.4-py3-none-any.whl", hash = "sha256:dda242603d1c9cd836c3368b1174ed74cb4049ecd209e7a1a0104620c18c5c11"}, - {file = "importlib_resources-6.4.4.tar.gz", hash = "sha256:20600c8b7361938dc0bb2d5ec0297802e575df486f5a544fa414da65e13721f7"}, + {file = "importlib_resources-6.4.5-py3-none-any.whl", hash = "sha256:ac29d5f956f01d5e4bb63102a5a19957f1b9175e45649977264a1416783bb717"}, + {file = "importlib_resources-6.4.5.tar.gz", hash = "sha256:980862a1d16c9e147a59603677fa2aa5fd82b87f223b6cb870695bcfce830065"}, ] [package.extras] @@ -959,12 +1281,12 @@ files = [ [[package]] name = "luigi" -version = "3.5.1" +version = "3.5.2" description = "Workflow mgmgt + task scheduling + dependency resolution." optional = false python-versions = "*" files = [ - {file = "luigi-3.5.1.tar.gz", hash = "sha256:fc790b2747515dd19c673efbb8e4c9ace5f4c5cdc31f8e7f93dc667deb2ec6c8"}, + {file = "luigi-3.5.2.tar.gz", hash = "sha256:d000fe6a6ea77c9376674fe87a045ac00c3fcf7ebe8414655a06630aa9db5111"}, ] [package.dependencies] @@ -978,6 +1300,30 @@ jsonschema = ["jsonschema"] prometheus = ["prometheus-client (>=0.5,<0.15)"] toml = ["toml (<2.0.0)"] +[[package]] +name = "markdown-it-py" +version = "3.0.0" +description = "Python port of markdown-it. Markdown parsing, done right!" 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, + {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, +] + +[package.dependencies] +mdurl = ">=0.1,<1.0" + +[package.extras] +benchmarking = ["psutil", "pytest", "pytest-benchmark"] +code-style = ["pre-commit (>=3.0,<4.0)"] +compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] +linkify = ["linkify-it-py (>=1,<3)"] +plugins = ["mdit-py-plugins"] +profiling = ["gprof2dot"] +rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] +testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] + [[package]] name = "markupsafe" version = "2.1.5" @@ -1047,6 +1393,94 @@ files = [ {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, ] +[[package]] +name = "mccabe" +version = "0.7.0" +description = "McCabe checker, plugin for flake8" +optional = false +python-versions = ">=3.6" +files = [ + {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, + {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, +] + +[[package]] +name = "mdit-py-plugins" +version = "0.4.2" +description = "Collection of plugins for markdown-it-py" +optional = false +python-versions = ">=3.8" +files = [ + {file = "mdit_py_plugins-0.4.2-py3-none-any.whl", hash = "sha256:0c673c3f889399a33b95e88d2f0d111b4447bdfea7f237dab2d488f459835636"}, + {file = "mdit_py_plugins-0.4.2.tar.gz", hash = "sha256:5f2cd1fdb606ddf152d37ec30e46101a60512bc0e5fa1a7002c36647b09e26b5"}, +] + +[package.dependencies] +markdown-it-py = ">=1.0.0,<4.0.0" + +[package.extras] +code-style = ["pre-commit"] +rtd = ["myst-parser", "sphinx-book-theme"] +testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] + +[[package]] +name = "mdurl" +version = "0.1.2" +description = "Markdown URL utilities" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, + {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, +] + +[[package]] +name = "mypy" +version = "1.11.2" +description = "Optional static typing for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "mypy-1.11.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d42a6dd818ffce7be66cce644f1dff482f1d97c53ca70908dff0b9ddc120b77a"}, + {file = "mypy-1.11.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:801780c56d1cdb896eacd5619a83e427ce436d86a3bdf9112527f24a66618fef"}, + {file = "mypy-1.11.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:41ea707d036a5307ac674ea172875f40c9d55c5394f888b168033177fce47383"}, + {file = "mypy-1.11.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6e658bd2d20565ea86da7d91331b0eed6d2eee22dc031579e6297f3e12c758c8"}, + {file = "mypy-1.11.2-cp310-cp310-win_amd64.whl", hash = "sha256:478db5f5036817fe45adb7332d927daa62417159d49783041338921dcf646fc7"}, + {file = 
"mypy-1.11.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:75746e06d5fa1e91bfd5432448d00d34593b52e7e91a187d981d08d1f33d4385"}, + {file = "mypy-1.11.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a976775ab2256aadc6add633d44f100a2517d2388906ec4f13231fafbb0eccca"}, + {file = "mypy-1.11.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cd953f221ac1379050a8a646585a29574488974f79d8082cedef62744f0a0104"}, + {file = "mypy-1.11.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:57555a7715c0a34421013144a33d280e73c08df70f3a18a552938587ce9274f4"}, + {file = "mypy-1.11.2-cp311-cp311-win_amd64.whl", hash = "sha256:36383a4fcbad95f2657642a07ba22ff797de26277158f1cc7bd234821468b1b6"}, + {file = "mypy-1.11.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e8960dbbbf36906c5c0b7f4fbf2f0c7ffb20f4898e6a879fcf56a41a08b0d318"}, + {file = "mypy-1.11.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:06d26c277962f3fb50e13044674aa10553981ae514288cb7d0a738f495550b36"}, + {file = "mypy-1.11.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6e7184632d89d677973a14d00ae4d03214c8bc301ceefcdaf5c474866814c987"}, + {file = "mypy-1.11.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3a66169b92452f72117e2da3a576087025449018afc2d8e9bfe5ffab865709ca"}, + {file = "mypy-1.11.2-cp312-cp312-win_amd64.whl", hash = "sha256:969ea3ef09617aff826885a22ece0ddef69d95852cdad2f60c8bb06bf1f71f70"}, + {file = "mypy-1.11.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:37c7fa6121c1cdfcaac97ce3d3b5588e847aa79b580c1e922bb5d5d2902df19b"}, + {file = "mypy-1.11.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4a8a53bc3ffbd161b5b2a4fff2f0f1e23a33b0168f1c0778ec70e1a3d66deb86"}, + {file = "mypy-1.11.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2ff93107f01968ed834f4256bc1fc4475e2fecf6c661260066a985b52741ddce"}, + {file = "mypy-1.11.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:edb91dded4df17eae4537668b23f0ff6baf3707683734b6a818d5b9d0c0c31a1"}, + {file = "mypy-1.11.2-cp38-cp38-win_amd64.whl", hash = "sha256:ee23de8530d99b6db0573c4ef4bd8f39a2a6f9b60655bf7a1357e585a3486f2b"}, + {file = "mypy-1.11.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:801ca29f43d5acce85f8e999b1e431fb479cb02d0e11deb7d2abb56bdaf24fd6"}, + {file = "mypy-1.11.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:af8d155170fcf87a2afb55b35dc1a0ac21df4431e7d96717621962e4b9192e70"}, + {file = "mypy-1.11.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f7821776e5c4286b6a13138cc935e2e9b6fde05e081bdebf5cdb2bb97c9df81d"}, + {file = "mypy-1.11.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:539c570477a96a4e6fb718b8d5c3e0c0eba1f485df13f86d2970c91f0673148d"}, + {file = "mypy-1.11.2-cp39-cp39-win_amd64.whl", hash = "sha256:3f14cd3d386ac4d05c5a39a51b84387403dadbd936e17cb35882134d4f8f0d24"}, + {file = "mypy-1.11.2-py3-none-any.whl", hash = "sha256:b499bc07dbdcd3de92b0a8b29fdf592c111276f6a12fe29c30f6c417dd546d12"}, + {file = "mypy-1.11.2.tar.gz", hash = "sha256:7f9993ad3e0ffdc95c2a14b66dee63729f021968bff8ad911867579c65d13a79"}, +] + +[package.dependencies] +mypy-extensions = ">=1.0.0" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing-extensions = ">=4.6.0" + +[package.extras] +dmypy = ["psutil (>=4.0)"] +install-types = ["pip"] +mypyc = ["setuptools (>=50)"] +reports = ["lxml"] + [[package]] name = "mypy-extensions" version = 
"1.0.0" @@ -1058,6 +1492,32 @@ files = [ {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, ] +[[package]] +name = "myst-parser" +version = "3.0.1" +description = "An extended [CommonMark](https://spec.commonmark.org/) compliant parser," +optional = false +python-versions = ">=3.8" +files = [ + {file = "myst_parser-3.0.1-py3-none-any.whl", hash = "sha256:6457aaa33a5d474aca678b8ead9b3dc298e89c68e67012e73146ea6fd54babf1"}, + {file = "myst_parser-3.0.1.tar.gz", hash = "sha256:88f0cb406cb363b077d176b51c476f62d60604d68a8dcdf4832e080441301a87"}, +] + +[package.dependencies] +docutils = ">=0.18,<0.22" +jinja2 = "*" +markdown-it-py = ">=3.0,<4.0" +mdit-py-plugins = ">=0.4,<1.0" +pyyaml = "*" +sphinx = ">=6,<8" + +[package.extras] +code-style = ["pre-commit (>=3.0,<4.0)"] +linkify = ["linkify-it-py (>=2.0,<3.0)"] +rtd = ["ipython", "sphinx (>=7)", "sphinx-autodoc2 (>=0.5.0,<0.6.0)", "sphinx-book-theme (>=1.1,<2.0)", "sphinx-copybutton", "sphinx-design", "sphinx-pyscript", "sphinx-tippy (>=0.4.3)", "sphinx-togglebutton", "sphinxext-opengraph (>=0.9.0,<0.10.0)", "sphinxext-rediraffe (>=0.2.7,<0.3.0)"] +testing = ["beautifulsoup4", "coverage[toml]", "defusedxml", "pytest (>=8,<9)", "pytest-cov", "pytest-param-files (>=0.6.0,<0.7.0)", "pytest-regressions", "sphinx-pytest"] +testing-docutils = ["pygments", "pytest (>=8,<9)", "pytest-param-files (>=0.6.0,<0.7.0)"] + [[package]] name = "netaddr" version = "1.3.0" @@ -1090,6 +1550,39 @@ doc = ["myst-nb (>=1.0)", "numpydoc (>=1.7)", "pillow (>=9.4)", "pydata-sphinx-t extra = ["lxml (>=4.6)", "pydot (>=2.0)", "pygraphviz (>=1.12)", "sympy (>=1.10)"] test = ["pytest (>=7.2)", "pytest-cov (>=4.0)"] +[[package]] +name = "nodeenv" +version = "1.9.1" +description = "Node.js virtual environment builder" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9"}, + {file = "nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"}, +] + +[[package]] +name = "nox" +version = "2024.4.15" +description = "Flexible test automation." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "nox-2024.4.15-py3-none-any.whl", hash = "sha256:6492236efa15a460ecb98e7b67562a28b70da006ab0be164e8821177577c0565"}, + {file = "nox-2024.4.15.tar.gz", hash = "sha256:ecf6700199cdfa9e5ea0a41ff5e6ef4641d09508eda6edb89d9987864115817f"}, +] + +[package.dependencies] +argcomplete = ">=1.9.4,<4.0" +colorlog = ">=2.6.1,<7.0.0" +packaging = ">=20.9" +tomli = {version = ">=1", markers = "python_version < \"3.11\""} +virtualenv = ">=20.14.1" + +[package.extras] +tox-to-nox = ["jinja2", "tox"] +uv = ["uv (>=0.1.6)"] + [[package]] name = "packaging" version = "24.1" @@ -1103,13 +1596,13 @@ files = [ [[package]] name = "paramiko" -version = "3.4.1" +version = "3.5.0" description = "SSH2 protocol library" optional = false python-versions = ">=3.6" files = [ - {file = "paramiko-3.4.1-py3-none-any.whl", hash = "sha256:8e49fd2f82f84acf7ffd57c64311aa2b30e575370dc23bdb375b10262f7eac32"}, - {file = "paramiko-3.4.1.tar.gz", hash = "sha256:8b15302870af7f6652f2e038975c1d2973f06046cb5d7d65355668b3ecbece0c"}, + {file = "paramiko-3.5.0-py3-none-any.whl", hash = "sha256:1fedf06b085359051cd7d0d270cebe19e755a8a921cc2ddbfa647fb0cd7d68f9"}, + {file = "paramiko-3.5.0.tar.gz", hash = "sha256:ad11e540da4f55cedda52931f1a3f812a8238a7af7f62a60de538cd80bb28124"}, ] [package.dependencies] @@ -1133,21 +1626,32 @@ files = [ {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, ] +[[package]] +name = "pbr" +version = "6.1.0" +description = "Python Build Reasonableness" +optional = false +python-versions = ">=2.6" +files = [ + {file = "pbr-6.1.0-py2.py3-none-any.whl", hash = "sha256:a776ae228892d8013649c0aeccbb3d5f99ee15e005a4cbb7e61d55a067b28a2a"}, + {file = "pbr-6.1.0.tar.gz", hash = "sha256:788183e382e3d1d7707db08978239965e8b9e4e5ed42669bf4758186734d5f24"}, +] + [[package]] name = "platformdirs" -version = "4.2.2" +version = "4.3.6" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." optional = false python-versions = ">=3.8" files = [ - {file = "platformdirs-4.2.2-py3-none-any.whl", hash = "sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee"}, - {file = "platformdirs-4.2.2.tar.gz", hash = "sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3"}, + {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"}, + {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"}, ] [package.extras] -docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] -type = ["mypy (>=1.8)"] +docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)"] +type = ["mypy (>=1.11.2)"] [[package]] name = "pluggy" @@ -1183,6 +1687,42 @@ docs = ["sphinx (>=1.7.1)"] redis = ["redis"] tests = ["pytest (>=5.4.1)", "pytest-cov (>=2.8.1)", "pytest-mypy (>=0.8.0)", "pytest-timeout (>=2.1.0)", "redis", "sphinx (>=6.0.0)", "types-redis"] +[[package]] +name = "pre-commit" +version = "3.8.0" +description = "A framework for managing and maintaining multi-language pre-commit hooks." 
+optional = false +python-versions = ">=3.9" +files = [ + {file = "pre_commit-3.8.0-py2.py3-none-any.whl", hash = "sha256:9a90a53bf82fdd8778d58085faf8d83df56e40dfe18f45b19446e26bf1b3a63f"}, + {file = "pre_commit-3.8.0.tar.gz", hash = "sha256:8bb6494d4a20423842e198980c9ecf9f96607a07ea29549e180eef9ae80fe7af"}, +] + +[package.dependencies] +cfgv = ">=2.0.0" +identify = ">=1.0.0" +nodeenv = ">=0.11.1" +pyyaml = ">=5.1" +virtualenv = ">=20.10.0" + +[[package]] +name = "prysk" +version = "0.20.0" +description = "Functional tests for command line applications" +optional = false +python-versions = "<4.0.0,>=3.8" +files = [ + {file = "prysk-0.20.0-py3-none-any.whl", hash = "sha256:3758f59febe1ff27710c8ba69a8edad42286050d041ed8df519fc4bbeea41133"}, + {file = "prysk-0.20.0.tar.gz", hash = "sha256:3499d24c9c8d534754d3915218cb2ab59cf59a8d6f37acfb68dc582650e67e33"}, +] + +[package.dependencies] +pytest-prysk = {version = ">=0.2.0,<0.3.0", optional = true, markers = "extra == \"pytest-plugin\""} +rich = ">=13.3.1,<14.0.0" + +[package.extras] +pytest-plugin = ["pytest-prysk (>=0.2.0,<0.3.0)"] + [[package]] name = "pycparser" version = "2.22" @@ -1196,123 +1736,124 @@ files = [ [[package]] name = "pydantic" -version = "2.8.2" +version = "2.9.2" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic-2.8.2-py3-none-any.whl", hash = "sha256:73ee9fddd406dc318b885c7a2eab8a6472b68b8fb5ba8150949fc3db939f23c8"}, - {file = "pydantic-2.8.2.tar.gz", hash = "sha256:6f62c13d067b0755ad1c21a34bdd06c0c12625a22b0fc09c6b149816604f7c2a"}, + {file = "pydantic-2.9.2-py3-none-any.whl", hash = "sha256:f048cec7b26778210e28a0459867920654d48e5e62db0958433636cde4254f12"}, + {file = "pydantic-2.9.2.tar.gz", hash = "sha256:d155cef71265d1e9807ed1c32b4c8deec042a44a50a4188b25ac67ecd81a9c0f"}, ] [package.dependencies] -annotated-types = ">=0.4.0" +annotated-types = ">=0.6.0" email-validator = {version = ">=2.0.0", optional = true, markers = "extra == \"email\""} -pydantic-core = "2.20.1" +pydantic-core = "2.23.4" typing-extensions = [ - {version = ">=4.12.2", markers = "python_version >= \"3.13\""}, {version = ">=4.6.1", markers = "python_version < \"3.13\""}, + {version = ">=4.12.2", markers = "python_version >= \"3.13\""}, ] [package.extras] email = ["email-validator (>=2.0.0)"] +timezone = ["tzdata"] [[package]] name = "pydantic-core" -version = "2.20.1" +version = "2.23.4" description = "Core functionality for Pydantic validation and serialization" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic_core-2.20.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3acae97ffd19bf091c72df4d726d552c473f3576409b2a7ca36b2f535ffff4a3"}, - {file = "pydantic_core-2.20.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:41f4c96227a67a013e7de5ff8f20fb496ce573893b7f4f2707d065907bffdbd6"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f239eb799a2081495ea659d8d4a43a8f42cd1fe9ff2e7e436295c38a10c286a"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:53e431da3fc53360db73eedf6f7124d1076e1b4ee4276b36fb25514544ceb4a3"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f1f62b2413c3a0e846c3b838b2ecd6c7a19ec6793b2a522745b0869e37ab5bc1"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:5d41e6daee2813ecceea8eda38062d69e280b39df793f5a942fa515b8ed67953"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d482efec8b7dc6bfaedc0f166b2ce349df0011f5d2f1f25537ced4cfc34fd98"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e93e1a4b4b33daed65d781a57a522ff153dcf748dee70b40c7258c5861e1768a"}, - {file = "pydantic_core-2.20.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e7c4ea22b6739b162c9ecaaa41d718dfad48a244909fe7ef4b54c0b530effc5a"}, - {file = "pydantic_core-2.20.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4f2790949cf385d985a31984907fecb3896999329103df4e4983a4a41e13e840"}, - {file = "pydantic_core-2.20.1-cp310-none-win32.whl", hash = "sha256:5e999ba8dd90e93d57410c5e67ebb67ffcaadcea0ad973240fdfd3a135506250"}, - {file = "pydantic_core-2.20.1-cp310-none-win_amd64.whl", hash = "sha256:512ecfbefef6dac7bc5eaaf46177b2de58cdf7acac8793fe033b24ece0b9566c"}, - {file = "pydantic_core-2.20.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d2a8fa9d6d6f891f3deec72f5cc668e6f66b188ab14bb1ab52422fe8e644f312"}, - {file = "pydantic_core-2.20.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:175873691124f3d0da55aeea1d90660a6ea7a3cfea137c38afa0a5ffabe37b88"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37eee5b638f0e0dcd18d21f59b679686bbd18917b87db0193ae36f9c23c355fc"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:25e9185e2d06c16ee438ed39bf62935ec436474a6ac4f9358524220f1b236e43"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:150906b40ff188a3260cbee25380e7494ee85048584998c1e66df0c7a11c17a6"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ad4aeb3e9a97286573c03df758fc7627aecdd02f1da04516a86dc159bf70121"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3f3ed29cd9f978c604708511a1f9c2fdcb6c38b9aae36a51905b8811ee5cbf1"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b0dae11d8f5ded51699c74d9548dcc5938e0804cc8298ec0aa0da95c21fff57b"}, - {file = "pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:faa6b09ee09433b87992fb5a2859efd1c264ddc37280d2dd5db502126d0e7f27"}, - {file = "pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9dc1b507c12eb0481d071f3c1808f0529ad41dc415d0ca11f7ebfc666e66a18b"}, - {file = "pydantic_core-2.20.1-cp311-none-win32.whl", hash = "sha256:fa2fddcb7107e0d1808086ca306dcade7df60a13a6c347a7acf1ec139aa6789a"}, - {file = "pydantic_core-2.20.1-cp311-none-win_amd64.whl", hash = "sha256:40a783fb7ee353c50bd3853e626f15677ea527ae556429453685ae32280c19c2"}, - {file = "pydantic_core-2.20.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:595ba5be69b35777474fa07f80fc260ea71255656191adb22a8c53aba4479231"}, - {file = "pydantic_core-2.20.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a4f55095ad087474999ee28d3398bae183a66be4823f753cd7d67dd0153427c9"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9aa05d09ecf4c75157197f27cdc9cfaeb7c5f15021c6373932bf3e124af029f"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:e97fdf088d4b31ff4ba35db26d9cc472ac7ef4a2ff2badeabf8d727b3377fc52"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bc633a9fe1eb87e250b5c57d389cf28998e4292336926b0b6cdaee353f89a237"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d573faf8eb7e6b1cbbcb4f5b247c60ca8be39fe2c674495df0eb4318303137fe"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26dc97754b57d2fd00ac2b24dfa341abffc380b823211994c4efac7f13b9e90e"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:33499e85e739a4b60c9dac710c20a08dc73cb3240c9a0e22325e671b27b70d24"}, - {file = "pydantic_core-2.20.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bebb4d6715c814597f85297c332297c6ce81e29436125ca59d1159b07f423eb1"}, - {file = "pydantic_core-2.20.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:516d9227919612425c8ef1c9b869bbbee249bc91912c8aaffb66116c0b447ebd"}, - {file = "pydantic_core-2.20.1-cp312-none-win32.whl", hash = "sha256:469f29f9093c9d834432034d33f5fe45699e664f12a13bf38c04967ce233d688"}, - {file = "pydantic_core-2.20.1-cp312-none-win_amd64.whl", hash = "sha256:035ede2e16da7281041f0e626459bcae33ed998cca6a0a007a5ebb73414ac72d"}, - {file = "pydantic_core-2.20.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:0827505a5c87e8aa285dc31e9ec7f4a17c81a813d45f70b1d9164e03a813a686"}, - {file = "pydantic_core-2.20.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:19c0fa39fa154e7e0b7f82f88ef85faa2a4c23cc65aae2f5aea625e3c13c735a"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa223cd1e36b642092c326d694d8bf59b71ddddc94cdb752bbbb1c5c91d833b"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c336a6d235522a62fef872c6295a42ecb0c4e1d0f1a3e500fe949415761b8a19"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7eb6a0587eded33aeefea9f916899d42b1799b7b14b8f8ff2753c0ac1741edac"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:70c8daf4faca8da5a6d655f9af86faf6ec2e1768f4b8b9d0226c02f3d6209703"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9fa4c9bf273ca41f940bceb86922a7667cd5bf90e95dbb157cbb8441008482c"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:11b71d67b4725e7e2a9f6e9c0ac1239bbc0c48cce3dc59f98635efc57d6dac83"}, - {file = "pydantic_core-2.20.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:270755f15174fb983890c49881e93f8f1b80f0b5e3a3cc1394a255706cabd203"}, - {file = "pydantic_core-2.20.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c81131869240e3e568916ef4c307f8b99583efaa60a8112ef27a366eefba8ef0"}, - {file = "pydantic_core-2.20.1-cp313-none-win32.whl", hash = "sha256:b91ced227c41aa29c672814f50dbb05ec93536abf8f43cd14ec9521ea09afe4e"}, - {file = "pydantic_core-2.20.1-cp313-none-win_amd64.whl", hash = "sha256:65db0f2eefcaad1a3950f498aabb4875c8890438bc80b19362cf633b87a8ab20"}, - {file = "pydantic_core-2.20.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:4745f4ac52cc6686390c40eaa01d48b18997cb130833154801a442323cc78f91"}, - {file = "pydantic_core-2.20.1-cp38-cp38-macosx_11_0_arm64.whl", hash = 
"sha256:a8ad4c766d3f33ba8fd692f9aa297c9058970530a32c728a2c4bfd2616d3358b"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41e81317dd6a0127cabce83c0c9c3fbecceae981c8391e6f1dec88a77c8a569a"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:04024d270cf63f586ad41fff13fde4311c4fc13ea74676962c876d9577bcc78f"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eaad4ff2de1c3823fddf82f41121bdf453d922e9a238642b1dedb33c4e4f98ad"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:26ab812fa0c845df815e506be30337e2df27e88399b985d0bb4e3ecfe72df31c"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c5ebac750d9d5f2706654c638c041635c385596caf68f81342011ddfa1e5598"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2aafc5a503855ea5885559eae883978c9b6d8c8993d67766ee73d82e841300dd"}, - {file = "pydantic_core-2.20.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:4868f6bd7c9d98904b748a2653031fc9c2f85b6237009d475b1008bfaeb0a5aa"}, - {file = "pydantic_core-2.20.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aa2f457b4af386254372dfa78a2eda2563680d982422641a85f271c859df1987"}, - {file = "pydantic_core-2.20.1-cp38-none-win32.whl", hash = "sha256:225b67a1f6d602de0ce7f6c1c3ae89a4aa25d3de9be857999e9124f15dab486a"}, - {file = "pydantic_core-2.20.1-cp38-none-win_amd64.whl", hash = "sha256:6b507132dcfc0dea440cce23ee2182c0ce7aba7054576efc65634f080dbe9434"}, - {file = "pydantic_core-2.20.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:b03f7941783b4c4a26051846dea594628b38f6940a2fdc0df00b221aed39314c"}, - {file = "pydantic_core-2.20.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1eedfeb6089ed3fad42e81a67755846ad4dcc14d73698c120a82e4ccf0f1f9f6"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:635fee4e041ab9c479e31edda27fcf966ea9614fff1317e280d99eb3e5ab6fe2"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:77bf3ac639c1ff567ae3b47f8d4cc3dc20f9966a2a6dd2311dcc055d3d04fb8a"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ed1b0132f24beeec5a78b67d9388656d03e6a7c837394f99257e2d55b461611"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c6514f963b023aeee506678a1cf821fe31159b925c4b76fe2afa94cc70b3222b"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10d4204d8ca33146e761c79f83cc861df20e7ae9f6487ca290a97702daf56006"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2d036c7187b9422ae5b262badb87a20a49eb6c5238b2004e96d4da1231badef1"}, - {file = "pydantic_core-2.20.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9ebfef07dbe1d93efb94b4700f2d278494e9162565a54f124c404a5656d7ff09"}, - {file = "pydantic_core-2.20.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6b9d9bb600328a1ce523ab4f454859e9d439150abb0906c5a1983c146580ebab"}, - {file = "pydantic_core-2.20.1-cp39-none-win32.whl", hash = "sha256:784c1214cb6dd1e3b15dd8b91b9a53852aed16671cc3fbe4786f4f1db07089e2"}, - {file = "pydantic_core-2.20.1-cp39-none-win_amd64.whl", hash = 
"sha256:d2fe69c5434391727efa54b47a1e7986bb0186e72a41b203df8f5b0a19a4f669"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a45f84b09ac9c3d35dfcf6a27fd0634d30d183205230a0ebe8373a0e8cfa0906"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d02a72df14dfdbaf228424573a07af10637bd490f0901cee872c4f434a735b94"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2b27e6af28f07e2f195552b37d7d66b150adbaa39a6d327766ffd695799780f"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:084659fac3c83fd674596612aeff6041a18402f1e1bc19ca39e417d554468482"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:242b8feb3c493ab78be289c034a1f659e8826e2233786e36f2893a950a719bb6"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:38cf1c40a921d05c5edc61a785c0ddb4bed67827069f535d794ce6bcded919fc"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e0bbdd76ce9aa5d4209d65f2b27fc6e5ef1312ae6c5333c26db3f5ade53a1e99"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:254ec27fdb5b1ee60684f91683be95e5133c994cc54e86a0b0963afa25c8f8a6"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:407653af5617f0757261ae249d3fba09504d7a71ab36ac057c938572d1bc9331"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:c693e916709c2465b02ca0ad7b387c4f8423d1db7b4649c551f27a529181c5ad"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b5ff4911aea936a47d9376fd3ab17e970cc543d1b68921886e7f64bd28308d1"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:177f55a886d74f1808763976ac4efd29b7ed15c69f4d838bbd74d9d09cf6fa86"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:964faa8a861d2664f0c7ab0c181af0bea66098b1919439815ca8803ef136fc4e"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4dd484681c15e6b9a977c785a345d3e378d72678fd5f1f3c0509608da24f2ac0"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f6d6cff3538391e8486a431569b77921adfcdef14eb18fbf19b7c0a5294d4e6a"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a6d511cc297ff0883bc3708b465ff82d7560193169a8b93260f74ecb0a5e08a7"}, - {file = "pydantic_core-2.20.1.tar.gz", hash = "sha256:26ca695eeee5f9f1aeeb211ffc12f10bcb6f71e2989988fda61dabd65db878d4"}, + {file = "pydantic_core-2.23.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:b10bd51f823d891193d4717448fab065733958bdb6a6b351967bd349d48d5c9b"}, + {file = "pydantic_core-2.23.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4fc714bdbfb534f94034efaa6eadd74e5b93c8fa6315565a222f7b6f42ca1166"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63e46b3169866bd62849936de036f901a9356e36376079b05efa83caeaa02ceb"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed1a53de42fbe34853ba90513cea21673481cd81ed1be739f7f2efb931b24916"}, + {file = 
"pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cfdd16ab5e59fc31b5e906d1a3f666571abc367598e3e02c83403acabc092e07"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:255a8ef062cbf6674450e668482456abac99a5583bbafb73f9ad469540a3a232"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a7cd62e831afe623fbb7aabbb4fe583212115b3ef38a9f6b71869ba644624a2"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f09e2ff1f17c2b51f2bc76d1cc33da96298f0a036a137f5440ab3ec5360b624f"}, + {file = "pydantic_core-2.23.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e38e63e6f3d1cec5a27e0afe90a085af8b6806ee208b33030e65b6516353f1a3"}, + {file = "pydantic_core-2.23.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0dbd8dbed2085ed23b5c04afa29d8fd2771674223135dc9bc937f3c09284d071"}, + {file = "pydantic_core-2.23.4-cp310-none-win32.whl", hash = "sha256:6531b7ca5f951d663c339002e91aaebda765ec7d61b7d1e3991051906ddde119"}, + {file = "pydantic_core-2.23.4-cp310-none-win_amd64.whl", hash = "sha256:7c9129eb40958b3d4500fa2467e6a83356b3b61bfff1b414c7361d9220f9ae8f"}, + {file = "pydantic_core-2.23.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:77733e3892bb0a7fa797826361ce8a9184d25c8dffaec60b7ffe928153680ba8"}, + {file = "pydantic_core-2.23.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b84d168f6c48fabd1f2027a3d1bdfe62f92cade1fb273a5d68e621da0e44e6d"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df49e7a0861a8c36d089c1ed57d308623d60416dab2647a4a17fe050ba85de0e"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ff02b6d461a6de369f07ec15e465a88895f3223eb75073ffea56b84d9331f607"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:996a38a83508c54c78a5f41456b0103c30508fed9abcad0a59b876d7398f25fd"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d97683ddee4723ae8c95d1eddac7c192e8c552da0c73a925a89fa8649bf13eea"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:216f9b2d7713eb98cb83c80b9c794de1f6b7e3145eef40400c62e86cee5f4e1e"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6f783e0ec4803c787bcea93e13e9932edab72068f68ecffdf86a99fd5918878b"}, + {file = "pydantic_core-2.23.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d0776dea117cf5272382634bd2a5c1b6eb16767c223c6a5317cd3e2a757c61a0"}, + {file = "pydantic_core-2.23.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d5f7a395a8cf1621939692dba2a6b6a830efa6b3cee787d82c7de1ad2930de64"}, + {file = "pydantic_core-2.23.4-cp311-none-win32.whl", hash = "sha256:74b9127ffea03643e998e0c5ad9bd3811d3dac8c676e47db17b0ee7c3c3bf35f"}, + {file = "pydantic_core-2.23.4-cp311-none-win_amd64.whl", hash = "sha256:98d134c954828488b153d88ba1f34e14259284f256180ce659e8d83e9c05eaa3"}, + {file = "pydantic_core-2.23.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f3e0da4ebaef65158d4dfd7d3678aad692f7666877df0002b8a522cdf088f231"}, + {file = "pydantic_core-2.23.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f69a8e0b033b747bb3e36a44e7732f0c99f7edd5cea723d45bc0d6e95377ffee"}, + {file = 
"pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:723314c1d51722ab28bfcd5240d858512ffd3116449c557a1336cbe3919beb87"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bb2802e667b7051a1bebbfe93684841cc9351004e2badbd6411bf357ab8d5ac8"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d18ca8148bebe1b0a382a27a8ee60350091a6ddaf475fa05ef50dc35b5df6327"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33e3d65a85a2a4a0dc3b092b938a4062b1a05f3a9abde65ea93b233bca0e03f2"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:128585782e5bfa515c590ccee4b727fb76925dd04a98864182b22e89a4e6ed36"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:68665f4c17edcceecc112dfed5dbe6f92261fb9d6054b47d01bf6371a6196126"}, + {file = "pydantic_core-2.23.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:20152074317d9bed6b7a95ade3b7d6054845d70584216160860425f4fbd5ee9e"}, + {file = "pydantic_core-2.23.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9261d3ce84fa1d38ed649c3638feefeae23d32ba9182963e465d58d62203bd24"}, + {file = "pydantic_core-2.23.4-cp312-none-win32.whl", hash = "sha256:4ba762ed58e8d68657fc1281e9bb72e1c3e79cc5d464be146e260c541ec12d84"}, + {file = "pydantic_core-2.23.4-cp312-none-win_amd64.whl", hash = "sha256:97df63000f4fea395b2824da80e169731088656d1818a11b95f3b173747b6cd9"}, + {file = "pydantic_core-2.23.4-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7530e201d10d7d14abce4fb54cfe5b94a0aefc87da539d0346a484ead376c3cc"}, + {file = "pydantic_core-2.23.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:df933278128ea1cd77772673c73954e53a1c95a4fdf41eef97c2b779271bd0bd"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cb3da3fd1b6a5d0279a01877713dbda118a2a4fc6f0d821a57da2e464793f05"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42c6dcb030aefb668a2b7009c85b27f90e51e6a3b4d5c9bc4c57631292015b0d"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:696dd8d674d6ce621ab9d45b205df149399e4bb9aa34102c970b721554828510"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2971bb5ffe72cc0f555c13e19b23c85b654dd2a8f7ab493c262071377bfce9f6"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8394d940e5d400d04cad4f75c0598665cbb81aecefaca82ca85bd28264af7f9b"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0dff76e0602ca7d4cdaacc1ac4c005e0ce0dcfe095d5b5259163a80d3a10d327"}, + {file = "pydantic_core-2.23.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7d32706badfe136888bdea71c0def994644e09fff0bfe47441deaed8e96fdbc6"}, + {file = "pydantic_core-2.23.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ed541d70698978a20eb63d8c5d72f2cc6d7079d9d90f6b50bad07826f1320f5f"}, + {file = "pydantic_core-2.23.4-cp313-none-win32.whl", hash = "sha256:3d5639516376dce1940ea36edf408c554475369f5da2abd45d44621cb616f769"}, + {file = "pydantic_core-2.23.4-cp313-none-win_amd64.whl", hash = 
"sha256:5a1504ad17ba4210df3a045132a7baeeba5a200e930f57512ee02909fc5c4cb5"}, + {file = "pydantic_core-2.23.4-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d4488a93b071c04dc20f5cecc3631fc78b9789dd72483ba15d423b5b3689b555"}, + {file = "pydantic_core-2.23.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:81965a16b675b35e1d09dd14df53f190f9129c0202356ed44ab2728b1c905658"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ffa2ebd4c8530079140dd2d7f794a9d9a73cbb8e9d59ffe24c63436efa8f271"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:61817945f2fe7d166e75fbfb28004034b48e44878177fc54d81688e7b85a3665"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:29d2c342c4bc01b88402d60189f3df065fb0dda3654744d5a165a5288a657368"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5e11661ce0fd30a6790e8bcdf263b9ec5988e95e63cf901972107efc49218b13"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d18368b137c6295db49ce7218b1a9ba15c5bc254c96d7c9f9e924a9bc7825ad"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ec4e55f79b1c4ffb2eecd8a0cfba9955a2588497d96851f4c8f99aa4a1d39b12"}, + {file = "pydantic_core-2.23.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:374a5e5049eda9e0a44c696c7ade3ff355f06b1fe0bb945ea3cac2bc336478a2"}, + {file = "pydantic_core-2.23.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5c364564d17da23db1106787675fc7af45f2f7b58b4173bfdd105564e132e6fb"}, + {file = "pydantic_core-2.23.4-cp38-none-win32.whl", hash = "sha256:d7a80d21d613eec45e3d41eb22f8f94ddc758a6c4720842dc74c0581f54993d6"}, + {file = "pydantic_core-2.23.4-cp38-none-win_amd64.whl", hash = "sha256:5f5ff8d839f4566a474a969508fe1c5e59c31c80d9e140566f9a37bba7b8d556"}, + {file = "pydantic_core-2.23.4-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a4fa4fc04dff799089689f4fd502ce7d59de529fc2f40a2c8836886c03e0175a"}, + {file = "pydantic_core-2.23.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0a7df63886be5e270da67e0966cf4afbae86069501d35c8c1b3b6c168f42cb36"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dcedcd19a557e182628afa1d553c3895a9f825b936415d0dbd3cd0bbcfd29b4b"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f54b118ce5de9ac21c363d9b3caa6c800341e8c47a508787e5868c6b79c9323"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86d2f57d3e1379a9525c5ab067b27dbb8a0642fb5d454e17a9ac434f9ce523e3"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:de6d1d1b9e5101508cb37ab0d972357cac5235f5c6533d1071964c47139257df"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1278e0d324f6908e872730c9102b0112477a7f7cf88b308e4fc36ce1bdb6d58c"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9a6b5099eeec78827553827f4c6b8615978bb4b6a88e5d9b93eddf8bb6790f55"}, + {file = "pydantic_core-2.23.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e55541f756f9b3ee346b840103f32779c695a19826a4c442b7954550a0972040"}, + {file = 
"pydantic_core-2.23.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a5c7ba8ffb6d6f8f2ab08743be203654bb1aaa8c9dcb09f82ddd34eadb695605"}, + {file = "pydantic_core-2.23.4-cp39-none-win32.whl", hash = "sha256:37b0fe330e4a58d3c58b24d91d1eb102aeec675a3db4c292ec3928ecd892a9a6"}, + {file = "pydantic_core-2.23.4-cp39-none-win_amd64.whl", hash = "sha256:1498bec4c05c9c787bde9125cfdcc63a41004ff167f495063191b863399b1a29"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f455ee30a9d61d3e1a15abd5068827773d6e4dc513e795f380cdd59932c782d5"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1e90d2e3bd2c3863d48525d297cd143fe541be8bbf6f579504b9712cb6b643ec"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e203fdf807ac7e12ab59ca2bfcabb38c7cf0b33c41efeb00f8e5da1d86af480"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e08277a400de01bc72436a0ccd02bdf596631411f592ad985dcee21445bd0068"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f220b0eea5965dec25480b6333c788fb72ce5f9129e8759ef876a1d805d00801"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d06b0c8da4f16d1d1e352134427cb194a0a6e19ad5db9161bf32b2113409e728"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ba1a0996f6c2773bd83e63f18914c1de3c9dd26d55f4ac302a7efe93fb8e7433"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:9a5bce9d23aac8f0cf0836ecfc033896aa8443b501c58d0602dbfd5bd5b37753"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:78ddaaa81421a29574a682b3179d4cf9e6d405a09b99d93ddcf7e5239c742e21"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:883a91b5dd7d26492ff2f04f40fbb652de40fcc0afe07e8129e8ae779c2110eb"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88ad334a15b32a791ea935af224b9de1bf99bcd62fabf745d5f3442199d86d59"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:233710f069d251feb12a56da21e14cca67994eab08362207785cf8c598e74577"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:19442362866a753485ba5e4be408964644dd6a09123d9416c54cd49171f50744"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:624e278a7d29b6445e4e813af92af37820fafb6dcc55c012c834f9e26f9aaaef"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f5ef8f42bec47f21d07668a043f077d507e5bf4e668d5c6dfe6aaba89de1a5b8"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:aea443fffa9fbe3af1a9ba721a87f926fe548d32cab71d188a6ede77d0ff244e"}, + {file = "pydantic_core-2.23.4.tar.gz", hash = "sha256:2584f7cf844ac4d970fba483a717dbe10c1c1c96a969bf65d61ffe94df1b2863"}, ] [package.dependencies] @@ -1320,13 +1861,13 @@ typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" [[package]] name = "pydot" -version = "3.0.1" +version = "3.0.2" description = "Python interface to Graphviz's Dot" optional = false python-versions = ">=3.8" files = [ - {file = "pydot-3.0.1-py3-none-any.whl", hash = 
"sha256:43f1e878dc1ff7c1c2e3470a6999d4e9e97771c5c862440c2f0af0ba844c231f"}, - {file = "pydot-3.0.1.tar.gz", hash = "sha256:e18cf7f287c497d77b536a3d20a46284568fea390776dface6eabbdf1b1b5efc"}, + {file = "pydot-3.0.2-py3-none-any.whl", hash = "sha256:99cedaa55d04abb0b2bc56d9981a6da781053dd5ac75c428e8dd53db53f90b14"}, + {file = "pydot-3.0.2.tar.gz", hash = "sha256:9180da540b51b3aa09fbf81140b3edfbe2315d778e8589a7d0a4a69c41332bae"}, ] [package.dependencies] @@ -1335,7 +1876,7 @@ pyparsing = ">=3.0.9" [package.extras] dev = ["chardet", "parameterized", "ruff"] release = ["zest.releaser[recommended]"] -tests = ["chardet", "parameterized", "ruff", "tox", "unittest-parallel"] +tests = ["chardet", "parameterized", "pytest", "pytest-cov", "pytest-xdist[psutil]", "ruff", "tox"] [[package]] name = "pygithub" @@ -1356,6 +1897,20 @@ requests = ">=2.14.0" typing-extensions = ">=4.0.0" urllib3 = ">=1.26.0" +[[package]] +name = "pygments" +version = "2.18.0" +description = "Pygments is a syntax highlighting package written in Python." +optional = false +python-versions = ">=3.8" +files = [ + {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"}, + {file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"}, +] + +[package.extras] +windows-terminal = ["colorama (>=0.4.6)"] + [[package]] name = "pyjwt" version = "2.9.0" @@ -1376,6 +1931,35 @@ dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pyte docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"] tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] +[[package]] +name = "pylint" +version = "3.3.1" +description = "python code static checker" +optional = false +python-versions = ">=3.9.0" +files = [ + {file = "pylint-3.3.1-py3-none-any.whl", hash = "sha256:2f846a466dd023513240bc140ad2dd73bfc080a5d85a710afdb728c420a5a2b9"}, + {file = "pylint-3.3.1.tar.gz", hash = "sha256:9f3dcc87b1203e612b78d91a896407787e708b3f189b5fa0b307712d49ff0c6e"}, +] + +[package.dependencies] +astroid = ">=3.3.4,<=3.4.0-dev0" +colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} +dill = [ + {version = ">=0.2", markers = "python_version < \"3.11\""}, + {version = ">=0.3.7", markers = "python_version >= \"3.12\""}, + {version = ">=0.3.6", markers = "python_version >= \"3.11\" and python_version < \"3.12\""}, +] +isort = ">=4.2.5,<5.13.0 || >5.13.0,<6" +mccabe = ">=0.6,<0.8" +platformdirs = ">=2.2.0" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +tomlkit = ">=0.10.1" + +[package.extras] +spelling = ["pyenchant (>=3.2,<4.0)"] +testutils = ["gitpython (>3)"] + [[package]] name = "pynacl" version = "1.5.0" @@ -1418,15 +2002,18 @@ diagrams = ["jinja2", "railroad-diagrams"] [[package]] name = "pyreadline3" -version = "3.4.1" +version = "3.5.4" description = "A python implementation of GNU readline." 
optional = false -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "pyreadline3-3.4.1-py3-none-any.whl", hash = "sha256:b0efb6516fd4fb07b45949053826a62fa4cb353db5be2bbb4a7aa1fdd1e345fb"}, - {file = "pyreadline3-3.4.1.tar.gz", hash = "sha256:6f3d1f7b8a31ba32b73917cefc1f28cc660562f39aea8646d30bd6eff21f7bae"}, + {file = "pyreadline3-3.5.4-py3-none-any.whl", hash = "sha256:eaf8e6cc3c49bcccf145fc6067ba8643d1df34d604a1ec0eccbf7a18e6d3fae6"}, + {file = "pyreadline3-3.5.4.tar.gz", hash = "sha256:8d57d53039a1c75adba8e50dd3d992b28143480816187ea5efbd5c78e6c885b7"}, ] +[package.extras] +dev = ["build", "flake8", "mypy", "pytest", "twine"] + [[package]] name = "pytest" version = "7.4.4" @@ -1466,6 +2053,21 @@ pytest = ">=6.2.5" [package.extras] dev = ["pre-commit", "pytest-asyncio", "tox"] +[[package]] +name = "pytest-prysk" +version = "0.2.0" +description = "Pytest plugin for prysk" +optional = false +python-versions = ">=3.8,<4.0" +files = [ + {file = "pytest_prysk-0.2.0-py3-none-any.whl", hash = "sha256:3180a9d3a6634e6e70107b2eed2a6a7420630b14ba2036598ef690f9b71be79f"}, + {file = "pytest_prysk-0.2.0.tar.gz", hash = "sha256:488d1f77e35beec9cad13e11368dcc5d09555ec31a4d6a3f9d901e78bbeeb2d1"}, +] + +[package.dependencies] +prysk = ">=0.15.0" +pytest = ">=7.3.2,<8.0.0" + [[package]] name = "python-daemon" version = "3.0.1" @@ -1500,6 +2102,20 @@ files = [ [package.dependencies] six = ">=1.5" +[[package]] +name = "pyupgrade" +version = "3.17.0" +description = "A tool to automatically upgrade syntax for newer versions." +optional = false +python-versions = ">=3.9" +files = [ + {file = "pyupgrade-3.17.0-py2.py3-none-any.whl", hash = "sha256:cbc8f67a61d3f4e7ca9c2ef57b9aae67f023d3780ce30c99fccec78401723754"}, + {file = "pyupgrade-3.17.0.tar.gz", hash = "sha256:d5dd1dcaf9a016c31508bb9d3d09fd335d736578092f91df52bb26ac30c37919"}, +] + +[package.dependencies] +tokenize-rt = ">=5.2.0" + [[package]] name = "pywin32" version = "306" @@ -1606,6 +2222,25 @@ urllib3 = ">=1.21.1,<3" socks = ["PySocks (>=1.5.6,!=1.5.7)"] use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] +[[package]] +name = "rich" +version = "13.9.1" +description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "rich-13.9.1-py3-none-any.whl", hash = "sha256:b340e739f30aa58921dc477b8adaa9ecdb7cecc217be01d93730ee1bc8aa83be"}, + {file = "rich-13.9.1.tar.gz", hash = "sha256:097cffdf85db1babe30cc7deba5ab3a29e1b9885047dab24c57e9a7f8a9c1466"}, +] + +[package.dependencies] +markdown-it-py = ">=2.2.0" +pygments = ">=2.13.0,<3.0.0" +typing-extensions = {version = ">=4.0.0,<5.0", markers = "python_version < \"3.11\""} + +[package.extras] +jupyter = ["ipywidgets (>=7.5.1,<9)"] + [[package]] name = "setuptools" version = "73.0.1" @@ -1622,6 +2257,31 @@ core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.te doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "mypy (==1.11.*)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", 
"pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (<0.4)", "pytest-ruff (>=0.2.1)", "pytest-ruff (>=0.3.2)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] +[[package]] +name = "shellingham" +version = "1.5.4" +description = "Tool to Detect Surrounding Shell" +optional = false +python-versions = ">=3.7" +files = [ + {file = "shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686"}, + {file = "shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de"}, +] + +[[package]] +name = "shibuya" +version = "2024.8.30" +description = "A clean, responsive, and customizable Sphinx documentation theme with light/dark mode." +optional = false +python-versions = ">=3.7" +files = [ + {file = "shibuya-2024.8.30-py3-none-any.whl", hash = "sha256:c5b865132babcddc4ed600b36a6d05ecc869979f358d9796268927e1eb7ef619"}, + {file = "shibuya-2024.8.30.tar.gz", hash = "sha256:0fdc75643ee40d00fb9a0f1f26ce1085e88df8e48b525a228fe202ffdf31883d"}, +] + +[package.dependencies] +Sphinx = "*" + [[package]] name = "simplejson" version = "3.19.3" @@ -1774,6 +2434,249 @@ files = [ {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, ] +[[package]] +name = "snowballstemmer" +version = "2.2.0" +description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." +optional = false +python-versions = "*" +files = [ + {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, + {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, +] + +[[package]] +name = "soupsieve" +version = "2.6" +description = "A modern CSS selector implementation for Beautiful Soup." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "soupsieve-2.6-py3-none-any.whl", hash = "sha256:e72c4ff06e4fb6e4b5a9f0f55fe6e81514581fca1515028625d0f299c602ccc9"}, + {file = "soupsieve-2.6.tar.gz", hash = "sha256:e2e68417777af359ec65daac1057404a3c8a5455bb8abc36f1a9866ab1a51abb"}, +] + +[[package]] +name = "sphinx" +version = "7.4.7" +description = "Python documentation generator" +optional = false +python-versions = ">=3.9" +files = [ + {file = "sphinx-7.4.7-py3-none-any.whl", hash = "sha256:c2419e2135d11f1951cd994d6eb18a1835bd8fdd8429f9ca375dc1f3281bd239"}, + {file = "sphinx-7.4.7.tar.gz", hash = "sha256:242f92a7ea7e6c5b406fdc2615413890ba9f699114a9c09192d7dfead2ee9cfe"}, +] + +[package.dependencies] +alabaster = ">=0.7.14,<0.8.0" +babel = ">=2.13" +colorama = {version = ">=0.4.6", markers = "sys_platform == \"win32\""} +docutils = ">=0.20,<0.22" +imagesize = ">=1.3" +Jinja2 = ">=3.1" +packaging = ">=23.0" +Pygments = ">=2.17" +requests = ">=2.30.0" +snowballstemmer = ">=2.2" +sphinxcontrib-applehelp = "*" +sphinxcontrib-devhelp = "*" +sphinxcontrib-htmlhelp = ">=2.0.0" +sphinxcontrib-jsmath = "*" +sphinxcontrib-qthelp = "*" +sphinxcontrib-serializinghtml = ">=1.1.9" +tomli = {version = ">=2", markers = "python_version < \"3.11\""} + +[package.extras] +docs = ["sphinxcontrib-websupport"] +lint = ["flake8 (>=6.0)", "importlib-metadata (>=6.0)", "mypy (==1.10.1)", "pytest (>=6.0)", "ruff (==0.5.2)", "sphinx-lint (>=0.9)", "tomli (>=2)", "types-docutils (==0.21.0.20240711)", "types-requests (>=2.30.0)"] +test = ["cython (>=3.0)", "defusedxml (>=0.7.1)", "pytest (>=8.0)", "setuptools (>=70.0)", "typing_extensions (>=4.9)"] + +[[package]] +name = "sphinx-basic-ng" +version = "1.0.0b2" +description = "A modern skeleton for Sphinx themes." +optional = false +python-versions = ">=3.7" +files = [ + {file = "sphinx_basic_ng-1.0.0b2-py3-none-any.whl", hash = "sha256:eb09aedbabfb650607e9b4b68c9d240b90b1e1be221d6ad71d61c52e29f7932b"}, + {file = "sphinx_basic_ng-1.0.0b2.tar.gz", hash = "sha256:9ec55a47c90c8c002b5960c57492ec3021f5193cb26cebc2dc4ea226848651c9"}, +] + +[package.dependencies] +sphinx = ">=4.0" + +[package.extras] +docs = ["furo", "ipython", "myst-parser", "sphinx-copybutton", "sphinx-inline-tabs"] + +[[package]] +name = "sphinx-copybutton" +version = "0.5.2" +description = "Add a copy button to each of your code cells." +optional = false +python-versions = ">=3.7" +files = [ + {file = "sphinx-copybutton-0.5.2.tar.gz", hash = "sha256:4cf17c82fb9646d1bc9ca92ac280813a3b605d8c421225fd9913154103ee1fbd"}, + {file = "sphinx_copybutton-0.5.2-py3-none-any.whl", hash = "sha256:fb543fd386d917746c9a2c50360c7905b605726b9355cd26e9974857afeae06e"}, +] + +[package.dependencies] +sphinx = ">=1.8" + +[package.extras] +code-style = ["pre-commit (==2.12.1)"] +rtd = ["ipython", "myst-nb", "sphinx", "sphinx-book-theme", "sphinx-examples"] + +[[package]] +name = "sphinx-design" +version = "0.6.1" +description = "A sphinx extension for designing beautiful, view size responsive web components." 
+optional = false +python-versions = ">=3.9" +files = [ + {file = "sphinx_design-0.6.1-py3-none-any.whl", hash = "sha256:b11f37db1a802a183d61b159d9a202314d4d2fe29c163437001324fe2f19549c"}, + {file = "sphinx_design-0.6.1.tar.gz", hash = "sha256:b44eea3719386d04d765c1a8257caca2b3e6f8421d7b3a5e742c0fd45f84e632"}, +] + +[package.dependencies] +sphinx = ">=6,<9" + +[package.extras] +code-style = ["pre-commit (>=3,<4)"] +rtd = ["myst-parser (>=2,<4)"] +testing = ["defusedxml", "myst-parser (>=2,<4)", "pytest (>=8.3,<9.0)", "pytest-cov", "pytest-regressions"] +testing-no-myst = ["defusedxml", "pytest (>=8.3,<9.0)", "pytest-cov", "pytest-regressions"] +theme-furo = ["furo (>=2024.7.18,<2024.8.0)"] +theme-im = ["sphinx-immaterial (>=0.12.2,<0.13.0)"] +theme-pydata = ["pydata-sphinx-theme (>=0.15.2,<0.16.0)"] +theme-rtd = ["sphinx-rtd-theme (>=2.0,<3.0)"] +theme-sbt = ["sphinx-book-theme (>=1.1,<2.0)"] + +[[package]] +name = "sphinx-inline-tabs" +version = "2023.4.21" +description = "Add inline tabbed content to your Sphinx documentation." +optional = false +python-versions = ">=3.8" +files = [ + {file = "sphinx_inline_tabs-2023.4.21-py3-none-any.whl", hash = "sha256:06809ac613f7c48ddd6e2fa588413e3fe92cff2397b56e2ccf0b0218f9ef6a78"}, + {file = "sphinx_inline_tabs-2023.4.21.tar.gz", hash = "sha256:5df2f13f602c158f3f5f6c509e008aeada199a8c76d97ba3aa2822206683bebc"}, +] + +[package.dependencies] +sphinx = ">=3" + +[package.extras] +doc = ["furo", "myst-parser"] +test = ["pytest", "pytest-cov", "pytest-xdist"] + +[[package]] +name = "sphinxcontrib-applehelp" +version = "2.0.0" +description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books" +optional = false +python-versions = ">=3.9" +files = [ + {file = "sphinxcontrib_applehelp-2.0.0-py3-none-any.whl", hash = "sha256:4cd3f0ec4ac5dd9c17ec65e9ab272c9b867ea77425228e68ecf08d6b28ddbdb5"}, + {file = "sphinxcontrib_applehelp-2.0.0.tar.gz", hash = "sha256:2f29ef331735ce958efa4734873f084941970894c6090408b079c61b2e1c06d1"}, +] + +[package.extras] +lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] +standalone = ["Sphinx (>=5)"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-devhelp" +version = "2.0.0" +description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp documents" +optional = false +python-versions = ">=3.9" +files = [ + {file = "sphinxcontrib_devhelp-2.0.0-py3-none-any.whl", hash = "sha256:aefb8b83854e4b0998877524d1029fd3e6879210422ee3780459e28a1f03a8a2"}, + {file = "sphinxcontrib_devhelp-2.0.0.tar.gz", hash = "sha256:411f5d96d445d1d73bb5d52133377b4248ec79db5c793ce7dbe59e074b4dd1ad"}, +] + +[package.extras] +lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] +standalone = ["Sphinx (>=5)"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-htmlhelp" +version = "2.1.0" +description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" +optional = false +python-versions = ">=3.9" +files = [ + {file = "sphinxcontrib_htmlhelp-2.1.0-py3-none-any.whl", hash = "sha256:166759820b47002d22914d64a075ce08f4c46818e17cfc9470a9786b759b19f8"}, + {file = "sphinxcontrib_htmlhelp-2.1.0.tar.gz", hash = "sha256:c9e2916ace8aad64cc13a0d233ee22317f2b9025b9cf3295249fa985cc7082e9"}, +] + +[package.extras] +lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] +standalone = ["Sphinx (>=5)"] +test = ["html5lib", "pytest"] + +[[package]] +name = "sphinxcontrib-jsmath" +version = "1.0.1" +description = "A sphinx extension which renders display math in HTML via JavaScript" +optional = false 
+python-versions = ">=3.5" +files = [ + {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"}, + {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"}, +] + +[package.extras] +test = ["flake8", "mypy", "pytest"] + +[[package]] +name = "sphinxcontrib-qthelp" +version = "2.0.0" +description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp documents" +optional = false +python-versions = ">=3.9" +files = [ + {file = "sphinxcontrib_qthelp-2.0.0-py3-none-any.whl", hash = "sha256:b18a828cdba941ccd6ee8445dbe72ffa3ef8cbe7505d8cd1fa0d42d3f2d5f3eb"}, + {file = "sphinxcontrib_qthelp-2.0.0.tar.gz", hash = "sha256:4fe7d0ac8fc171045be623aba3e2a8f613f8682731f9153bb2e40ece16b9bbab"}, +] + +[package.extras] +lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] +standalone = ["Sphinx (>=5)"] +test = ["defusedxml (>=0.7.1)", "pytest"] + +[[package]] +name = "sphinxcontrib-serializinghtml" +version = "2.0.0" +description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)" +optional = false +python-versions = ">=3.9" +files = [ + {file = "sphinxcontrib_serializinghtml-2.0.0-py3-none-any.whl", hash = "sha256:6e2cb0eef194e10c27ec0023bfeb25badbbb5868244cf5bc5bdc04e4464bf331"}, + {file = "sphinxcontrib_serializinghtml-2.0.0.tar.gz", hash = "sha256:e9d912827f872c029017a53f0ef2180b327c3f7fd23c87229f7a8e8b70031d4d"}, +] + +[package.extras] +lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] +standalone = ["Sphinx (>=5)"] +test = ["pytest"] + +[[package]] +name = "stevedore" +version = "5.3.0" +description = "Manage dynamic plugins for Python applications" +optional = false +python-versions = ">=3.8" +files = [ + {file = "stevedore-5.3.0-py3-none-any.whl", hash = "sha256:1efd34ca08f474dad08d9b19e934a22c68bb6fe416926479ba29e5013bcc8f78"}, + {file = "stevedore-5.3.0.tar.gz", hash = "sha256:9a64265f4060312828151c204efbe9b7a9852a0d9228756344dbc7e4023e375a"}, +] + +[package.dependencies] +pbr = ">=2.0.0" + [[package]] name = "stopwatch-py" version = "2.0.1" @@ -1800,6 +2703,17 @@ files = [ doc = ["reno", "sphinx"] test = ["pytest", "tornado (>=4.5)", "typeguard"] +[[package]] +name = "tokenize-rt" +version = "6.0.0" +description = "A wrapper around the stdlib `tokenize` which roundtrips." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "tokenize_rt-6.0.0-py2.py3-none-any.whl", hash = "sha256:d4ff7ded2873512938b4f8cbb98c9b07118f01d30ac585a30d7a88353ca36d22"}, + {file = "tokenize_rt-6.0.0.tar.gz", hash = "sha256:b9711bdfc51210211137499b5e355d3de5ec88a85d2025c520cbb921b5194367"}, +] + [[package]] name = "toml" version = "0.10.2" @@ -1813,13 +2727,24 @@ files = [ [[package]] name = "tomli" -version = "2.0.1" +version = "2.0.2" description = "A lil' TOML parser" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" +files = [ + {file = "tomli-2.0.2-py3-none-any.whl", hash = "sha256:2ebe24485c53d303f690b0ec092806a085f07af5a5aa1464f3931eec36caaa38"}, + {file = "tomli-2.0.2.tar.gz", hash = "sha256:d46d457a85337051c36524bc5349dd91b1877838e2979ac5ced3e710ed8a60ed"}, +] + +[[package]] +name = "tomlkit" +version = "0.13.2" +description = "Style preserving TOML library" +optional = false +python-versions = ">=3.8" files = [ - {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, - {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, + {file = "tomlkit-0.13.2-py3-none-any.whl", hash = "sha256:7a974427f6e119197f670fbbbeae7bef749a6c14e793db934baefc1b5f03efde"}, + {file = "tomlkit-0.13.2.tar.gz", hash = "sha256:fff5fe59a87295b278abd31bec92c15d9bc4a06885ab12bcea52c71119392e79"}, ] [[package]] @@ -1860,15 +2785,32 @@ typing-extensions = ">=4.10.0" doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme (>=1.3.0)"] test = ["coverage[toml] (>=7)", "mypy (>=1.2.0)", "pytest (>=7)"] +[[package]] +name = "typer" +version = "0.12.5" +description = "Typer, build great CLIs. Easy to code. Based on Python type hints." +optional = false +python-versions = ">=3.7" +files = [ + {file = "typer-0.12.5-py3-none-any.whl", hash = "sha256:62fe4e471711b147e3365034133904df3e235698399bc4de2b36c8579298d52b"}, + {file = "typer-0.12.5.tar.gz", hash = "sha256:f592f089bedcc8ec1b974125d64851029c3b1af145f04aca64d69410f0c9b722"}, +] + +[package.dependencies] +click = ">=8.0.0" +rich = ">=10.11.0" +shellingham = ">=1.3.0" +typing-extensions = ">=3.7.4.3" + [[package]] name = "types-requests" -version = "2.32.0.20240712" +version = "2.32.0.20240914" description = "Typing stubs for requests" optional = false python-versions = ">=3.8" files = [ - {file = "types-requests-2.32.0.20240712.tar.gz", hash = "sha256:90c079ff05e549f6bf50e02e910210b98b8ff1ebdd18e19c873cd237737c1358"}, - {file = "types_requests-2.32.0.20240712-py3-none-any.whl", hash = "sha256:f754283e152c752e46e70942fa2a146b5bc70393522257bb85bd1ef7e019dcc3"}, + {file = "types-requests-2.32.0.20240914.tar.gz", hash = "sha256:2850e178db3919d9bf809e434eef65ba49d0e7e33ac92d588f4a5e295fffd405"}, + {file = "types_requests-2.32.0.20240914-py3-none-any.whl", hash = "sha256:59c2f673eb55f32a99b2894faf6020e1a9f4a402ad0f192bfee0b64469054310"}, ] [package.dependencies] @@ -1887,13 +2829,13 @@ files = [ [[package]] name = "urllib3" -version = "2.2.2" +version = "2.2.3" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472"}, - {file = "urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"}, + {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"}, + {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"}, ] [package.extras] @@ -1902,6 +2844,26 @@ h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] +[[package]] +name = "virtualenv" +version = "20.26.6" +description = "Virtual Python Environment builder" +optional = false +python-versions = ">=3.7" +files = [ + {file = "virtualenv-20.26.6-py3-none-any.whl", hash = "sha256:7345cc5b25405607a624d8418154577459c3e0277f5466dd79c49d5e492995f2"}, + {file = "virtualenv-20.26.6.tar.gz", hash = "sha256:280aede09a2a5c317e409a00102e7077c6432c5a38f0ef938e643805a7ad2c48"}, +] + +[package.dependencies] +distlib = ">=0.3.7,<1" +filelock = ">=3.12.2,<4" +platformdirs = ">=3.9.1,<5" + +[package.extras] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] +test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] + [[package]] name = "wrapt" version = "1.16.0" @@ -1983,13 +2945,13 @@ files = [ [[package]] name = "zipp" -version = "3.20.1" +version = "3.20.2" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false python-versions = ">=3.8" files = [ - {file = "zipp-3.20.1-py3-none-any.whl", hash = "sha256:9960cd8967c8f85a56f920d5d507274e74f9ff813a0ab8889a5b5be2daf44064"}, - {file = "zipp-3.20.1.tar.gz", hash = "sha256:c22b14cc4763c5a5b04134207736c107db42e9d3ef2d9779d465f5f1bcba572b"}, + {file = "zipp-3.20.2-py3-none-any.whl", hash = "sha256:a817ac80d6cf4b23bf7f2828b7cabf326f15a001bea8b1f9b49631780ba28350"}, + {file = "zipp-3.20.2.tar.gz", hash = "sha256:bc9eb26f4506fda01b81bcde0ca78103b6e62f991b381fec825435c836edbc29"}, ] [package.extras] @@ -2003,4 +2965,4 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.0" python-versions = ">=3.10.0,<4.0" -content-hash = "d10704027944198548f228a0b7bdcfd8aacfe1f4b191d7cc479171209484211d" +content-hash = "e70412886e16ad227756b938a6a83c5eedd9e5f4800b925f3149d735595db7ba" diff --git a/pyproject.toml b/pyproject.toml index cc4ba72..1cc76db 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -9,6 +9,12 @@ authors = [ "Thomas Uebensee " ] +packages = [ + {include = "README.md"}, + {include = "LICENSE"}, + {include = "exasol_script_languages_container_ci"}, +] + [tool.poetry.dependencies] python = ">=3.10.0,<4.0" click = "^8.1.7" @@ -19,6 +25,9 @@ PyGithub = "^2.3.0" setuptools = "^73.0.0" datamodel-code-generator = ">=0.25.5 <0.30.0" +[tool.poetry.group.dev.dependencies] +exasol-toolbox = ">=0.15.0,<1.0" + [build-system] requires = ["poetry_core>=1.0.0"] build-backend = "poetry.core.masonry.api" @@ -28,11 +37,25 @@ toml = ">=0.10.2" pytest = "^7.1.1" pytest-mock = "^3.7.0" +[tool.isort] +profile = "black" +force_grid_wrap = 2 
[tool.pytest.ini_options] minversion = "6.0" -addopts = "--ignore=test/integration_tests/resources" +addopts = "--ignore=test/integration/resources" testpaths = [ "test" ] + +[[tool.mypy.overrides]] +module = ["exasol_integration_test_docker_environment.*", "docker.*", "toml"] +ignore_missing_imports = true + + +[tool.pylint.'MESSAGES CONTROL'] +max-line-length = 120 +disable = """ + W,R,C, +""" diff --git a/scripts/build/check_git_status.sh b/scripts/build/check_git_status.sh deleted file mode 100755 index 35e4d65..0000000 --- a/scripts/build/check_git_status.sh +++ /dev/null @@ -1,8 +0,0 @@ -#!/bin/bash - -#Prints git status and returns > 0 if working tree is dirty! If working tree is clean, it returns 0. - -git status --porcelain=v1 -uno -git diff --cached; git diff --cached --summary; -[ -z "$(git status --porcelain=v1 -uno 2>/dev/null)" ] - diff --git a/scripts/build/check_release.py b/scripts/build/check_release.py index e3434b6..0d0017a 100644 --- a/scripts/build/check_release.py +++ b/scripts/build/check_release.py @@ -1,8 +1,8 @@ import re from pathlib import Path -from git import Repo import toml +from git import Repo def get_git_version(): @@ -20,13 +20,15 @@ def get_git_version(): def get_poetry_version(): - parsed_toml = toml.load('pyproject.toml') + parsed_toml = toml.load("pyproject.toml") return parsed_toml["tool"]["poetry"]["version"].strip() def get_change_log_version(): # Path overloads __truediv__ - with open(Path(__file__).parent / ".." / ".." / "doc" / "changes" / "changelog.md") as changelog: + with open( + Path(__file__).parent / ".." / ".." / "doc" / "changes" / "changelog.md" + ) as changelog: changelog_str = changelog.read() # Search for the FIRST pattern like: "* [0.5.0](changes_0.5.0.md)" in the changelog file. # Note that we encapsulate the [(0.5.0)] with parenthesis, which tells re to return the matching string as group @@ -34,7 +36,7 @@ def get_change_log_version(): return version_match.groups()[0] -if __name__ == '__main__': +if __name__ == "__main__": poetry_version = get_poetry_version() latest_tag = get_git_version() changelog_version = get_change_log_version() diff --git a/scripts/build/check_release.sh b/scripts/build/check_release.sh deleted file mode 100755 index 350c721..0000000 --- a/scripts/build/check_release.sh +++ /dev/null @@ -1,11 +0,0 @@ -#!/bin/bash -set -o errexit -set -o nounset -set -o pipefail - -SCRIPT_DIR="$(dirname "$(readlink -f "${BASH_SOURCE[0]}")")" - -#shellcheck source=./scripts/build/setup_poetry_env.sh -source "$SCRIPT_DIR/setup_poetry_env.sh" "$@" - -poetry run python3 -u "$SCRIPT_DIR/check_release.py" diff --git a/scripts/build/setup_poetry_env.sh b/scripts/build/setup_poetry_env.sh deleted file mode 100644 index 2bc4478..0000000 --- a/scripts/build/setup_poetry_env.sh +++ /dev/null @@ -1,10 +0,0 @@ -#!/bin/bash -set -o errexit -set -o nounset -set -o pipefail - -PYTHON_VERSION=$1 - -PYTHON_BIN=$(command -v "$PYTHON_VERSION") -poetry env use "$PYTHON_BIN" -poetry install diff --git a/scripts/build/shellcheck.sh b/scripts/build/shellcheck.sh deleted file mode 100755 index d27b500..0000000 --- a/scripts/build/shellcheck.sh +++ /dev/null @@ -1,15 +0,0 @@ -#!/bin/bash - -set -u - -interesting_paths=("scripts") - -SCRIPT_DIR="$(dirname "$(readlink -f "${BASH_SOURCE[0]}")")" -status=0 - -for path in "${interesting_paths[@]}"; do - find "$SCRIPT_DIR/../../$path" -name '*.sh' -type f -print0 | xargs -0 -n1 shellcheck -x - test $? 
-ne 0 && status=1 -done - -exit "$status" \ No newline at end of file diff --git a/test/asserts.py b/test/asserts.py index b24adf0..3714fdc 100644 --- a/test/asserts.py +++ b/test/asserts.py @@ -8,4 +8,4 @@ def not_raises(exception): try: yield except exception: - raise pytest.fail("DID RAISE {0}".format(exception)) + raise pytest.fail(f"DID RAISE {exception}") diff --git a/test/conftest.py b/test/conftest.py index a86cd6c..ccc7196 100644 --- a/test/conftest.py +++ b/test/conftest.py @@ -5,13 +5,23 @@ from pathlib import Path from tempfile import TemporaryDirectory from unittest import mock -from unittest.mock import patch, MagicMock +from unittest.mock import ( + MagicMock, + patch, +) import pytest -from exasol_script_languages_container_ci.lib.config.config_data_model import Config, Build, Ignore, Release -from exasol_script_languages_container_ci.lib.config.data_model_generator import config_data_model_default_output_file, \ - regenerate_config_data_model +from exasol_script_languages_container_ci.lib.config.config_data_model import ( + Build, + Config, + Ignore, + Release, +) +from exasol_script_languages_container_ci.lib.config.data_model_generator import ( + config_data_model_default_output_file, + regenerate_config_data_model, +) script_path = Path(__file__).absolute().parent @@ -22,8 +32,10 @@ def pytest_addoption(parser): parser.addoption( - DISABLE_PYDANTIC_MODEL_GENERATION, action="store_true", default=False, - help="Disables the generation of the pydantic models from the json schemas" + DISABLE_PYDANTIC_MODEL_GENERATION, + action="store_true", + default=False, + help="Disables the generation of the pydantic models from the json schemas", ) @@ -43,7 +55,7 @@ def pytest_configure(config): @pytest.fixture def resources_path() -> Path: - return script_path / "integration_tests/resources" + return script_path / "integration/resources" @pytest.fixture @@ -78,10 +90,9 @@ def tmp_test_dir(): @pytest.fixture def build_config() -> Config: return Config( - build=Build( - ignore=Ignore(paths=["doc"]), - base_branch="master"), - release=Release(timeout_in_minutes=1)) + build=Build(ignore=Ignore(paths=["doc"]), base_branch="master"), + release=Release(timeout_in_minutes=1), + ) @pytest.fixture() @@ -99,7 +110,8 @@ def git_access_mock(): @pytest.fixture def expected_json_config() -> str: - json = cleandoc(""" + json = cleandoc( + """ { "build": { "ignore": { @@ -113,5 +125,6 @@ def expected_json_config() -> str: "release": { "timeout_in_minutes": 1 } - }""") + }""" + ) return json diff --git a/test/contract_tests/__init__.py b/test/contract/__init__.py similarity index 100% rename from test/contract_tests/__init__.py rename to test/contract/__init__.py diff --git a/test/contract_tests/test_ci_test.py b/test/contract/test_ci_test.py similarity index 57% rename from test/contract_tests/test_ci_test.py rename to test/contract/test_ci_test.py index 2188a31..38c5c5f 100644 --- a/test/contract_tests/test_ci_test.py +++ b/test/contract/test_ci_test.py @@ -1,5 +1,7 @@ import pytest -from exasol_integration_test_docker_environment.lib.api.api_errors import TaskRuntimeError +from exasol_integration_test_docker_environment.lib.api.api_errors import ( + TaskRuntimeError, +) from exasol_script_languages_container_ci.lib.ci_test import DBTestRunnerProtocol @@ -22,10 +24,15 @@ def db_test_runner(self) -> DBTestRunnerProtocol: raise NotImplementedError() def test(self, db_test_runner, test_container, flavor_path): - result = db_test_runner.run(flavor_path=(flavor_path,), test_folder=(), - 
release_goal=('release',), workers=7, - docker_username=None, docker_password=None, - test_container_folder=test_container) + result = db_test_runner.run( + flavor_path=(flavor_path,), + test_folder=(), + release_goal=("release",), + workers=7, + docker_username=None, + docker_password=None, + test_container_folder=test_container, + ) assert result.tests_are_ok and result.command_line_output_path.exists() @@ -36,11 +43,15 @@ def db_test_runner(self) -> DBTestRunnerProtocol: raise NotImplementedError() def test(self, db_test_runner, test_container, flavor_path): - result = db_test_runner.run(flavor_path=(flavor_path,), workers=7, - test_folder=('test/linker_namespace_sanity',), - release_goal=('base_test_build_run',), - docker_username=None, docker_password=None, - test_container_folder=test_container) + result = db_test_runner.run( + flavor_path=(flavor_path,), + workers=7, + test_folder=("test/linker_namespace_sanity",), + release_goal=("base_test_build_run",), + docker_username=None, + docker_password=None, + test_container_folder=test_container, + ) assert result.tests_are_ok and result.command_line_output_path.exists() @@ -62,23 +73,34 @@ def db_test_runner(self) -> DBTestRunnerProtocol: raise NotImplementedError() def test(self, db_test_runner, test_container, flavor_path): - result = db_test_runner.run(flavor_path=(flavor_path,), test_folder=(), - release_goal=('release',), workers=7, - docker_username=None, docker_password=None, - test_container_folder=test_container) + result = db_test_runner.run( + flavor_path=(flavor_path,), + test_folder=(), + release_goal=("release",), + workers=7, + docker_username=None, + docker_password=None, + test_container_folder=test_container, + ) assert not result.tests_are_ok and result.command_line_output_path.exists() -class FailingRunDBTestFlavorLinkerNamespaceTestsContract(FailingRunDBTestFlavorContract): +class FailingRunDBTestFlavorLinkerNamespaceTestsContract( + FailingRunDBTestFlavorContract +): @pytest.fixture def db_test_runner(self) -> DBTestRunnerProtocol: raise NotImplementedError() def test(self, db_test_runner, test_container, flavor_path): - result = db_test_runner.run(flavor_path=(flavor_path,), workers=7, - test_folder=('linker_namespace_sanity',), - release_goal=('base_test_build_run',), - docker_username=None, docker_password=None, - test_container_folder=test_container) + result = db_test_runner.run( + flavor_path=(flavor_path,), + workers=7, + test_folder=("linker_namespace_sanity",), + release_goal=("base_test_build_run",), + docker_username=None, + docker_password=None, + test_container_folder=test_container, + ) assert not result.tests_are_ok and result.command_line_output_path.exists() diff --git a/test/integration_tests/__init__.py b/test/integration/__init__.py similarity index 100% rename from test/integration_tests/__init__.py rename to test/integration/__init__.py diff --git a/test/integration_tests/resources/__init__.py b/test/integration/resources/__init__.py similarity index 100% rename from test/integration_tests/resources/__init__.py rename to test/integration/resources/__init__.py diff --git a/test/integration_tests/resources/flavors/__init__.py b/test/integration/resources/flavors/__init__.py similarity index 100% rename from test/integration_tests/resources/flavors/__init__.py rename to test/integration/resources/flavors/__init__.py diff --git a/test/integration_tests/resources/flavors/failing_run_db_test/__init__.py b/test/integration/resources/flavors/failing_run_db_test/__init__.py similarity index 100% rename 
from test/integration_tests/resources/flavors/failing_run_db_test/__init__.py rename to test/integration/resources/flavors/failing_run_db_test/__init__.py diff --git a/test/integration_tests/resources/flavors/failing_run_db_test/flavor_base/__init__.py b/test/integration/resources/flavors/failing_run_db_test/flavor_base/__init__.py similarity index 100% rename from test/integration_tests/resources/flavors/failing_run_db_test/flavor_base/__init__.py rename to test/integration/resources/flavors/failing_run_db_test/flavor_base/__init__.py diff --git a/test/integration_tests/resources/flavors/failing_run_db_test/flavor_base/base_test_build_run/Dockerfile b/test/integration/resources/flavors/failing_run_db_test/flavor_base/base_test_build_run/Dockerfile similarity index 100% rename from test/integration_tests/resources/flavors/failing_run_db_test/flavor_base/base_test_build_run/Dockerfile rename to test/integration/resources/flavors/failing_run_db_test/flavor_base/base_test_build_run/Dockerfile diff --git a/test/integration_tests/resources/flavors/failing_run_db_test/flavor_base/build_steps.py b/test/integration/resources/flavors/failing_run_db_test/flavor_base/build_steps.py similarity index 100% rename from test/integration_tests/resources/flavors/failing_run_db_test/flavor_base/build_steps.py rename to test/integration/resources/flavors/failing_run_db_test/flavor_base/build_steps.py diff --git a/test/integration_tests/resources/flavors/failing_run_db_test/flavor_base/flavor_test_build_run/Dockerfile b/test/integration/resources/flavors/failing_run_db_test/flavor_base/flavor_test_build_run/Dockerfile similarity index 100% rename from test/integration_tests/resources/flavors/failing_run_db_test/flavor_base/flavor_test_build_run/Dockerfile rename to test/integration/resources/flavors/failing_run_db_test/flavor_base/flavor_test_build_run/Dockerfile diff --git a/test/integration_tests/resources/flavors/failing_run_db_test/flavor_base/language_definition b/test/integration/resources/flavors/failing_run_db_test/flavor_base/language_definition similarity index 100% rename from test/integration_tests/resources/flavors/failing_run_db_test/flavor_base/language_definition rename to test/integration/resources/flavors/failing_run_db_test/flavor_base/language_definition diff --git a/test/integration_tests/resources/flavors/failing_run_db_test/flavor_base/release/Dockerfile b/test/integration/resources/flavors/failing_run_db_test/flavor_base/release/Dockerfile similarity index 100% rename from test/integration_tests/resources/flavors/failing_run_db_test/flavor_base/release/Dockerfile rename to test/integration/resources/flavors/failing_run_db_test/flavor_base/release/Dockerfile diff --git a/test/integration_tests/resources/flavors/failing_run_db_test/flavor_base/security_scan/Dockerfile b/test/integration/resources/flavors/failing_run_db_test/flavor_base/security_scan/Dockerfile similarity index 100% rename from test/integration_tests/resources/flavors/failing_run_db_test/flavor_base/security_scan/Dockerfile rename to test/integration/resources/flavors/failing_run_db_test/flavor_base/security_scan/Dockerfile diff --git a/test/integration_tests/resources/flavors/failing_run_db_test/flavor_base/security_scan/test.sh b/test/integration/resources/flavors/failing_run_db_test/flavor_base/security_scan/test.sh similarity index 100% rename from test/integration_tests/resources/flavors/failing_run_db_test/flavor_base/security_scan/test.sh rename to 
test/integration/resources/flavors/failing_run_db_test/flavor_base/security_scan/test.sh diff --git a/test/integration_tests/resources/flavors/failing_run_db_test/flavor_base/testconfig b/test/integration/resources/flavors/failing_run_db_test/flavor_base/testconfig similarity index 100% rename from test/integration_tests/resources/flavors/failing_run_db_test/flavor_base/testconfig rename to test/integration/resources/flavors/failing_run_db_test/flavor_base/testconfig diff --git a/test/integration_tests/resources/flavors/failing_run_db_test/flavor_customization/Dockerfile b/test/integration/resources/flavors/failing_run_db_test/flavor_customization/Dockerfile similarity index 100% rename from test/integration_tests/resources/flavors/failing_run_db_test/flavor_customization/Dockerfile rename to test/integration/resources/flavors/failing_run_db_test/flavor_customization/Dockerfile diff --git a/test/integration_tests/resources/flavors/failing_security_scan/__init__.py b/test/integration/resources/flavors/failing_security_scan/__init__.py similarity index 100% rename from test/integration_tests/resources/flavors/failing_security_scan/__init__.py rename to test/integration/resources/flavors/failing_security_scan/__init__.py diff --git a/test/integration_tests/resources/flavors/failing_security_scan/flavor_base/__init__.py b/test/integration/resources/flavors/failing_security_scan/flavor_base/__init__.py similarity index 100% rename from test/integration_tests/resources/flavors/failing_security_scan/flavor_base/__init__.py rename to test/integration/resources/flavors/failing_security_scan/flavor_base/__init__.py diff --git a/test/integration_tests/resources/flavors/failing_security_scan/flavor_base/base_test_build_run/Dockerfile b/test/integration/resources/flavors/failing_security_scan/flavor_base/base_test_build_run/Dockerfile similarity index 100% rename from test/integration_tests/resources/flavors/failing_security_scan/flavor_base/base_test_build_run/Dockerfile rename to test/integration/resources/flavors/failing_security_scan/flavor_base/base_test_build_run/Dockerfile diff --git a/test/integration_tests/resources/flavors/failing_security_scan/flavor_base/build_steps.py b/test/integration/resources/flavors/failing_security_scan/flavor_base/build_steps.py similarity index 100% rename from test/integration_tests/resources/flavors/failing_security_scan/flavor_base/build_steps.py rename to test/integration/resources/flavors/failing_security_scan/flavor_base/build_steps.py diff --git a/test/integration_tests/resources/flavors/failing_security_scan/flavor_base/flavor_test_build_run/Dockerfile b/test/integration/resources/flavors/failing_security_scan/flavor_base/flavor_test_build_run/Dockerfile similarity index 100% rename from test/integration_tests/resources/flavors/failing_security_scan/flavor_base/flavor_test_build_run/Dockerfile rename to test/integration/resources/flavors/failing_security_scan/flavor_base/flavor_test_build_run/Dockerfile diff --git a/test/integration_tests/resources/flavors/failing_security_scan/flavor_base/language_definition b/test/integration/resources/flavors/failing_security_scan/flavor_base/language_definition similarity index 100% rename from test/integration_tests/resources/flavors/failing_security_scan/flavor_base/language_definition rename to test/integration/resources/flavors/failing_security_scan/flavor_base/language_definition diff --git a/test/integration_tests/resources/flavors/failing_security_scan/flavor_base/release/Dockerfile 
b/test/integration/resources/flavors/failing_security_scan/flavor_base/release/Dockerfile similarity index 100% rename from test/integration_tests/resources/flavors/failing_security_scan/flavor_base/release/Dockerfile rename to test/integration/resources/flavors/failing_security_scan/flavor_base/release/Dockerfile diff --git a/test/integration_tests/resources/flavors/failing_security_scan/flavor_base/security_scan/Dockerfile b/test/integration/resources/flavors/failing_security_scan/flavor_base/security_scan/Dockerfile similarity index 100% rename from test/integration_tests/resources/flavors/failing_security_scan/flavor_base/security_scan/Dockerfile rename to test/integration/resources/flavors/failing_security_scan/flavor_base/security_scan/Dockerfile diff --git a/test/integration_tests/resources/flavors/failing_security_scan/flavor_base/security_scan/test.sh b/test/integration/resources/flavors/failing_security_scan/flavor_base/security_scan/test.sh similarity index 100% rename from test/integration_tests/resources/flavors/failing_security_scan/flavor_base/security_scan/test.sh rename to test/integration/resources/flavors/failing_security_scan/flavor_base/security_scan/test.sh diff --git a/test/integration_tests/resources/flavors/failing_security_scan/flavor_base/testconfig b/test/integration/resources/flavors/failing_security_scan/flavor_base/testconfig similarity index 100% rename from test/integration_tests/resources/flavors/failing_security_scan/flavor_base/testconfig rename to test/integration/resources/flavors/failing_security_scan/flavor_base/testconfig diff --git a/test/integration_tests/resources/flavors/failing_security_scan/flavor_customization/Dockerfile b/test/integration/resources/flavors/failing_security_scan/flavor_customization/Dockerfile similarity index 100% rename from test/integration_tests/resources/flavors/failing_security_scan/flavor_customization/Dockerfile rename to test/integration/resources/flavors/failing_security_scan/flavor_customization/Dockerfile diff --git a/test/integration_tests/resources/flavors/successful/__init__.py b/test/integration/resources/flavors/successful/__init__.py similarity index 100% rename from test/integration_tests/resources/flavors/successful/__init__.py rename to test/integration/resources/flavors/successful/__init__.py diff --git a/test/integration_tests/resources/flavors/successful/flavor_base/__init__.py b/test/integration/resources/flavors/successful/flavor_base/__init__.py similarity index 100% rename from test/integration_tests/resources/flavors/successful/flavor_base/__init__.py rename to test/integration/resources/flavors/successful/flavor_base/__init__.py diff --git a/test/integration_tests/resources/flavors/successful/flavor_base/base_test_build_run/Dockerfile b/test/integration/resources/flavors/successful/flavor_base/base_test_build_run/Dockerfile similarity index 100% rename from test/integration_tests/resources/flavors/successful/flavor_base/base_test_build_run/Dockerfile rename to test/integration/resources/flavors/successful/flavor_base/base_test_build_run/Dockerfile diff --git a/test/integration_tests/resources/flavors/successful/flavor_base/build_steps.py b/test/integration/resources/flavors/successful/flavor_base/build_steps.py similarity index 100% rename from test/integration_tests/resources/flavors/successful/flavor_base/build_steps.py rename to test/integration/resources/flavors/successful/flavor_base/build_steps.py diff --git 
a/test/integration_tests/resources/flavors/successful/flavor_base/flavor_test_build_run/Dockerfile b/test/integration/resources/flavors/successful/flavor_base/flavor_test_build_run/Dockerfile similarity index 100% rename from test/integration_tests/resources/flavors/successful/flavor_base/flavor_test_build_run/Dockerfile rename to test/integration/resources/flavors/successful/flavor_base/flavor_test_build_run/Dockerfile diff --git a/test/integration_tests/resources/flavors/successful/flavor_base/language_definition b/test/integration/resources/flavors/successful/flavor_base/language_definition similarity index 100% rename from test/integration_tests/resources/flavors/successful/flavor_base/language_definition rename to test/integration/resources/flavors/successful/flavor_base/language_definition diff --git a/test/integration_tests/resources/flavors/successful/flavor_base/release/Dockerfile b/test/integration/resources/flavors/successful/flavor_base/release/Dockerfile similarity index 100% rename from test/integration_tests/resources/flavors/successful/flavor_base/release/Dockerfile rename to test/integration/resources/flavors/successful/flavor_base/release/Dockerfile diff --git a/test/integration_tests/resources/flavors/successful/flavor_base/security_scan/Dockerfile b/test/integration/resources/flavors/successful/flavor_base/security_scan/Dockerfile similarity index 100% rename from test/integration_tests/resources/flavors/successful/flavor_base/security_scan/Dockerfile rename to test/integration/resources/flavors/successful/flavor_base/security_scan/Dockerfile diff --git a/test/integration_tests/resources/flavors/successful/flavor_base/security_scan/test.sh b/test/integration/resources/flavors/successful/flavor_base/security_scan/test.sh similarity index 100% rename from test/integration_tests/resources/flavors/successful/flavor_base/security_scan/test.sh rename to test/integration/resources/flavors/successful/flavor_base/security_scan/test.sh diff --git a/test/integration_tests/resources/flavors/successful/flavor_base/testconfig b/test/integration/resources/flavors/successful/flavor_base/testconfig similarity index 100% rename from test/integration_tests/resources/flavors/successful/flavor_base/testconfig rename to test/integration/resources/flavors/successful/flavor_base/testconfig diff --git a/test/integration_tests/resources/flavors/successful/flavor_customization/Dockerfile b/test/integration/resources/flavors/successful/flavor_customization/Dockerfile similarity index 100% rename from test/integration_tests/resources/flavors/successful/flavor_customization/Dockerfile rename to test/integration/resources/flavors/successful/flavor_customization/Dockerfile diff --git a/test/integration_tests/resources/test_containers/__init__.py b/test/integration/resources/test_containers/__init__.py similarity index 100% rename from test/integration_tests/resources/test_containers/__init__.py rename to test/integration/resources/test_containers/__init__.py diff --git a/test/integration_tests/resources/test_containers/failing/__init__.py b/test/integration/resources/test_containers/failing/__init__.py similarity index 100% rename from test/integration_tests/resources/test_containers/failing/__init__.py rename to test/integration/resources/test_containers/failing/__init__.py diff --git a/test/integration_tests/resources/test_containers/failing/build/Dockerfile b/test/integration/resources/test_containers/failing/build/Dockerfile similarity index 100% rename from 
test/integration_tests/resources/test_containers/failing/build/Dockerfile rename to test/integration/resources/test_containers/failing/build/Dockerfile diff --git a/test/integration_tests/resources/test_containers/failing/build/deps/01_nodoc b/test/integration/resources/test_containers/failing/build/deps/01_nodoc similarity index 100% rename from test/integration_tests/resources/test_containers/failing/build/deps/01_nodoc rename to test/integration/resources/test_containers/failing/build/deps/01_nodoc diff --git a/test/integration_tests/resources/test_containers/failing/build/deps/requirements.txt b/test/integration/resources/test_containers/failing/build/deps/requirements.txt similarity index 100% rename from test/integration_tests/resources/test_containers/failing/build/deps/requirements.txt rename to test/integration/resources/test_containers/failing/build/deps/requirements.txt diff --git a/test/integration_tests/resources/test_containers/failing/test_data/enginedb_small/import.sql b/test/integration/resources/test_containers/failing/test_data/enginedb_small/import.sql similarity index 100% rename from test/integration_tests/resources/test_containers/failing/test_data/enginedb_small/import.sql rename to test/integration/resources/test_containers/failing/test_data/enginedb_small/import.sql diff --git a/test/integration_tests/resources/test_containers/failing/tests/__init__.py b/test/integration/resources/test_containers/failing/tests/__init__.py similarity index 100% rename from test/integration_tests/resources/test_containers/failing/tests/__init__.py rename to test/integration/resources/test_containers/failing/tests/__init__.py diff --git a/test/integration_tests/resources/test_containers/failing/tests/test/__init__.py b/test/integration/resources/test_containers/failing/tests/test/__init__.py similarity index 100% rename from test/integration_tests/resources/test_containers/failing/tests/test/__init__.py rename to test/integration/resources/test_containers/failing/tests/test/__init__.py diff --git a/test/integration_tests/resources/test_containers/failing/tests/test/failing_test/__init__.py b/test/integration/resources/test_containers/failing/tests/test/failing_test/__init__.py similarity index 100% rename from test/integration_tests/resources/test_containers/failing/tests/test/failing_test/__init__.py rename to test/integration/resources/test_containers/failing/tests/test/failing_test/__init__.py diff --git a/test/integration_tests/resources/test_containers/failing/tests/test/failing_test/failing_test.py b/test/integration/resources/test_containers/failing/tests/test/failing_test/failing_test.py similarity index 100% rename from test/integration_tests/resources/test_containers/failing/tests/test/failing_test/failing_test.py rename to test/integration/resources/test_containers/failing/tests/test/failing_test/failing_test.py diff --git a/test/integration_tests/resources/test_containers/failing/tests/test/linker_namespace_sanity/__init__.py b/test/integration/resources/test_containers/failing/tests/test/linker_namespace_sanity/__init__.py similarity index 100% rename from test/integration_tests/resources/test_containers/failing/tests/test/linker_namespace_sanity/__init__.py rename to test/integration/resources/test_containers/failing/tests/test/linker_namespace_sanity/__init__.py diff --git a/test/integration_tests/resources/test_containers/failing/tests/test/linker_namespace_sanity/failing_test.py 
b/test/integration/resources/test_containers/failing/tests/test/linker_namespace_sanity/failing_test.py similarity index 100% rename from test/integration_tests/resources/test_containers/failing/tests/test/linker_namespace_sanity/failing_test.py rename to test/integration/resources/test_containers/failing/tests/test/linker_namespace_sanity/failing_test.py diff --git a/test/integration_tests/resources/test_containers/successful/__init__.py b/test/integration/resources/test_containers/successful/__init__.py similarity index 100% rename from test/integration_tests/resources/test_containers/successful/__init__.py rename to test/integration/resources/test_containers/successful/__init__.py diff --git a/test/integration_tests/resources/test_containers/successful/build/Dockerfile b/test/integration/resources/test_containers/successful/build/Dockerfile similarity index 100% rename from test/integration_tests/resources/test_containers/successful/build/Dockerfile rename to test/integration/resources/test_containers/successful/build/Dockerfile diff --git a/test/integration_tests/resources/test_containers/successful/build/deps/01_nodoc b/test/integration/resources/test_containers/successful/build/deps/01_nodoc similarity index 100% rename from test/integration_tests/resources/test_containers/successful/build/deps/01_nodoc rename to test/integration/resources/test_containers/successful/build/deps/01_nodoc diff --git a/test/integration_tests/resources/test_containers/successful/build/deps/requirements.txt b/test/integration/resources/test_containers/successful/build/deps/requirements.txt similarity index 100% rename from test/integration_tests/resources/test_containers/successful/build/deps/requirements.txt rename to test/integration/resources/test_containers/successful/build/deps/requirements.txt diff --git a/test/integration_tests/resources/test_containers/successful/test_data/enginedb_small/import.sql b/test/integration/resources/test_containers/successful/test_data/enginedb_small/import.sql similarity index 100% rename from test/integration_tests/resources/test_containers/successful/test_data/enginedb_small/import.sql rename to test/integration/resources/test_containers/successful/test_data/enginedb_small/import.sql diff --git a/test/integration_tests/resources/test_containers/successful/tests/__init__.py b/test/integration/resources/test_containers/successful/tests/__init__.py similarity index 100% rename from test/integration_tests/resources/test_containers/successful/tests/__init__.py rename to test/integration/resources/test_containers/successful/tests/__init__.py diff --git a/test/integration_tests/resources/test_containers/successful/tests/test/__init__.py b/test/integration/resources/test_containers/successful/tests/test/__init__.py similarity index 100% rename from test/integration_tests/resources/test_containers/successful/tests/test/__init__.py rename to test/integration/resources/test_containers/successful/tests/test/__init__.py diff --git a/test/integration_tests/resources/test_containers/successful/tests/test/linker_namespace_sanity/__init__.py b/test/integration/resources/test_containers/successful/tests/test/linker_namespace_sanity/__init__.py similarity index 100% rename from test/integration_tests/resources/test_containers/successful/tests/test/linker_namespace_sanity/__init__.py rename to test/integration/resources/test_containers/successful/tests/test/linker_namespace_sanity/__init__.py diff --git 
a/test/integration_tests/resources/test_containers/successful/tests/test/linker_namespace_sanity/successful_test.py b/test/integration/resources/test_containers/successful/tests/test/linker_namespace_sanity/successful_test.py similarity index 100% rename from test/integration_tests/resources/test_containers/successful/tests/test/linker_namespace_sanity/successful_test.py rename to test/integration/resources/test_containers/successful/tests/test/linker_namespace_sanity/successful_test.py diff --git a/test/integration_tests/resources/test_containers/successful/tests/test/successful_test/__init__.py b/test/integration/resources/test_containers/successful/tests/test/successful_test/__init__.py similarity index 100% rename from test/integration_tests/resources/test_containers/successful/tests/test/successful_test/__init__.py rename to test/integration/resources/test_containers/successful/tests/test/successful_test/__init__.py diff --git a/test/integration_tests/resources/test_containers/successful/tests/test/successful_test/successful_test.py b/test/integration/resources/test_containers/successful/tests/test/successful_test/successful_test.py similarity index 100% rename from test/integration_tests/resources/test_containers/successful/tests/test/successful_test/successful_test.py rename to test/integration/resources/test_containers/successful/tests/test/successful_test/successful_test.py diff --git a/test/integration_tests/test_ci_build.py b/test/integration/test_ci_build.py similarity index 62% rename from test/integration_tests/test_ci_build.py rename to test/integration/test_ci_build.py index a39b95f..9444330 100644 --- a/test/integration_tests/test_ci_build.py +++ b/test/integration/test_ci_build.py @@ -1,27 +1,35 @@ import logging import os from pathlib import Path +from test.asserts import not_raises import pytest from exasol_script_languages_container_ci.lib.ci_build import CIBuild -from test.asserts import not_raises testdata = [ - ("test_docker_build_repository", "test_commit_sha", "test_docker_build_repository", "test_commit_sha_"), - (None, "", "exasol/script-language-container", "") + ( + "test_docker_build_repository", + "test_commit_sha", + "test_docker_build_repository", + "test_commit_sha_", + ), + (None, "", "exasol/script-language-container", ""), ] @pytest.mark.parametrize( "input_docker_build_repository,input_commit_sha,expected_docker_build_repository,expected_source_tag_prefix", - testdata) -def test(input_docker_build_repository, - input_commit_sha, - expected_docker_build_repository, - expected_source_tag_prefix, - flavors_path, - test_containers_folder): + testdata, +) +def test( + input_docker_build_repository, + input_commit_sha, + expected_docker_build_repository, + expected_source_tag_prefix, + flavors_path, + test_containers_folder, +): test_type = "successful" flavor_path = str(flavors_path / test_type) test_container_folder = str(test_containers_folder / test_type) @@ -33,5 +41,5 @@ def test(input_docker_build_repository, build_docker_repository=input_docker_build_repository, docker_user=None, docker_password=None, - test_container_folder=test_container_folder + test_container_folder=test_container_folder, ) diff --git a/test/integration_tests/test_ci_export.py b/test/integration/test_ci_export.py similarity index 80% rename from test/integration_tests/test_ci_export.py rename to test/integration/test_ci_export.py index cbf790c..31be4e0 100644 --- a/test/integration_tests/test_ci_export.py +++ b/test/integration/test_ci_export.py @@ -11,5 +11,7 @@ def 
test(flavors_path): CIExport().export(flavor_path=(flavor_path,), export_path=temp_dir) temp_dir_path = Path(temp_dir) temp_dir_content = set(temp_dir_path.iterdir()) - files_start_with_flavor_name = [str(file.name).startswith(flavor_name) for file in temp_dir_content] + files_start_with_flavor_name = [ + str(file.name).startswith(flavor_name) for file in temp_dir_content + ] assert len(temp_dir_content) == 2 and all(files_start_with_flavor_name) diff --git a/test/integration_tests/test_ci_prepare.py b/test/integration/test_ci_prepare.py similarity index 85% rename from test/integration_tests/test_ci_prepare.py rename to test/integration/test_ci_prepare.py index 6f70f99..e584b9a 100644 --- a/test/integration_tests/test_ci_prepare.py +++ b/test/integration/test_ci_prepare.py @@ -8,9 +8,8 @@ from exasol_script_languages_container_ci.lib.ci_build import CIBuild from exasol_script_languages_container_ci.lib.ci_prepare import CIPrepare -def test(flavors_path, - test_containers_folder, - mock_settings_env_vars): + +def test(flavors_path, test_containers_folder, mock_settings_env_vars): test_type = "successful" flavor_path = str(flavors_path / test_type) test_container_folder = str(test_containers_folder / test_type) @@ -22,7 +21,7 @@ def test(flavors_path, build_docker_repository="input_docker_build_repository", docker_user=None, docker_password=None, - test_container_folder=test_container_folder + test_container_folder=test_container_folder, ) log_path = Path(os.environ[luigi_log_config.LOG_ENV_VARIABLE_NAME]) assert log_path.is_file() diff --git a/test/integration/test_ci_push.py b/test/integration/test_ci_push.py new file mode 100644 index 0000000..685d9a7 --- /dev/null +++ b/test/integration/test_ci_push.py @@ -0,0 +1,31 @@ +from pathlib import Path + +from exasol_integration_test_docker_environment.testing.docker_registry import ( + LocalDockerRegistryContextManager, +) + +from exasol_script_languages_container_ci.lib.ci_push import CIPush + + +def test(flavors_path): + flavor_name = "successful" + flavor_path = str(flavors_path / flavor_name) + with LocalDockerRegistryContextManager("test_ci_push") as registry: + CIPush().push( + flavor_path=(flavor_path,), + target_docker_repository=registry.name, + target_docker_tag_prefix="tag", + docker_user=None, + docker_password=None, + ) + expected_images = { + "name": "test_ci_push", + "tags": [ + f"tag_{flavor_name}-base_test_build_run_GUA7R5J3UM27WOHJSQPX2OJNSIEKWCM5YF5GJXKKXZI53LZPV75Q", + f"tag_{flavor_name}-flavor_test_build_run_G2OIMXJ2S3VS2EUAQNW4KWQLX3B2C27XYZ2SDMF7TQRS3UMAUWJQ", + f"tag_{flavor_name}-release_MNWZZGSSFQ6VCLBDH7CZBEZC4K35QQBSLOW5DSYHF3DFFDX2OOZQ", + ], + } + assert expected_images["name"] == registry.images["name"] and set( + expected_images["tags"] + ) == set(registry.images["tags"]) diff --git a/test/integration_tests/test_ci_security_scan.py b/test/integration/test_ci_security_scan.py similarity index 73% rename from test/integration_tests/test_ci_security_scan.py rename to test/integration/test_ci_security_scan.py index ecc9c63..e95e474 100644 --- a/test/integration_tests/test_ci_security_scan.py +++ b/test/integration/test_ci_security_scan.py @@ -1,34 +1,37 @@ -from unittest.mock import call, create_autospec +from test.matchers import file_exists_matcher +from unittest.mock import ( + call, + create_autospec, +) import pytest from exasol_script_languages_container_ci.lib.ci_security_scan import CISecurityScan -from exasol_script_languages_container_ci.lib.ci_step_output_printer import CIStepOutputPrinterProtocol -from 
test.matchers import file_exists_matcher +from exasol_script_languages_container_ci.lib.ci_step_output_printer import ( + CIStepOutputPrinterProtocol, +) def test_successful_flavor(flavors_path, test_containers_folder): flavor_path = str(flavors_path / "successful") printer_mock = create_autospec(CIStepOutputPrinterProtocol) - CISecurityScan( - printer=printer_mock - ).run_security_scan( + CISecurityScan(printer=printer_mock).run_security_scan( flavor_path=(flavor_path,), ) assert printer_mock.mock_calls == [ call.print_file(file_exists_matcher()), - call.print_exasol_docker_images()] + call.print_exasol_docker_images(), + ] def test_failing_security_scan(flavors_path): flavor_path = str(flavors_path / "failing_security_scan") printer_mock = create_autospec(CIStepOutputPrinterProtocol) with pytest.raises(AssertionError, match="Some security scans not successful."): - CISecurityScan( - printer=printer_mock - ).run_security_scan( + CISecurityScan(printer=printer_mock).run_security_scan( flavor_path=(flavor_path,), ) assert printer_mock.mock_calls == [ call.print_file(file_exists_matcher()), - call.print_exasol_docker_images()] + call.print_exasol_docker_images(), + ] diff --git a/test/integration_tests/test_ci_test.py b/test/integration/test_ci_test.py similarity index 51% rename from test/integration_tests/test_ci_test.py rename to test/integration/test_ci_test.py index 718f027..ab2f828 100644 --- a/test/integration_tests/test_ci_test.py +++ b/test/integration/test_ci_test.py @@ -1,9 +1,16 @@ +from test.contract.test_ci_test import ( + FailingRunDBTestFlavorDBTestsContract, + FailingRunDBTestFlavorLinkerNamespaceTestsContract, + SuccessfulFlavorDBTestsContract, + SuccessfulFlavorLinkerNamespaceTestsContract, +) + import pytest -from exasol_script_languages_container_ci.lib.ci_test import DBTestRunner, DBTestRunnerProtocol -from test.contract_tests.test_ci_test import SuccessfulFlavorDBTestsContract, \ - SuccessfulFlavorLinkerNamespaceTestsContract, \ - FailingRunDBTestFlavorDBTestsContract, FailingRunDBTestFlavorLinkerNamespaceTestsContract +from exasol_script_languages_container_ci.lib.ci_test import ( + DBTestRunner, + DBTestRunnerProtocol, +) class TestSuccessfulFlavorDBTestsContract(SuccessfulFlavorDBTestsContract): @@ -13,7 +20,9 @@ def db_test_runner(self) -> DBTestRunnerProtocol: return DBTestRunner() -class TestSuccessfulFlavorLinkerNamespaceTestsContract(SuccessfulFlavorLinkerNamespaceTestsContract): +class TestSuccessfulFlavorLinkerNamespaceTestsContract( + SuccessfulFlavorLinkerNamespaceTestsContract +): @pytest.fixture def db_test_runner(self) -> DBTestRunnerProtocol: @@ -27,7 +36,9 @@ def db_test_runner(self) -> DBTestRunnerProtocol: return DBTestRunner() -class TestFailingRunDBTestFlavorLinkerNamespaceTestsContract(FailingRunDBTestFlavorLinkerNamespaceTestsContract): +class TestFailingRunDBTestFlavorLinkerNamespaceTestsContract( + FailingRunDBTestFlavorLinkerNamespaceTestsContract +): @pytest.fixture def db_test_runner(self) -> DBTestRunnerProtocol: diff --git a/test/integration_tests/test_ci_push.py b/test/integration_tests/test_ci_push.py deleted file mode 100644 index e540ed0..0000000 --- a/test/integration_tests/test_ci_push.py +++ /dev/null @@ -1,27 +0,0 @@ -from pathlib import Path - -from exasol_integration_test_docker_environment.testing.docker_registry import LocalDockerRegistryContextManager - -from exasol_script_languages_container_ci.lib.ci_push import CIPush - - -def test(flavors_path): - flavor_name = "successful" - flavor_path = str(flavors_path / flavor_name) - 
with LocalDockerRegistryContextManager("test_ci_push") as registry: - CIPush().push( - flavor_path=(flavor_path,), - target_docker_repository=registry.name, - target_docker_tag_prefix="tag", - docker_user=None, - docker_password=None - ) - expected_images = \ - {'name': 'test_ci_push', - 'tags': [ - f'tag_{flavor_name}-base_test_build_run_GUA7R5J3UM27WOHJSQPX2OJNSIEKWCM5YF5GJXKKXZI53LZPV75Q', - f'tag_{flavor_name}-flavor_test_build_run_G2OIMXJ2S3VS2EUAQNW4KWQLX3B2C27XYZ2SDMF7TQRS3UMAUWJQ', - f'tag_{flavor_name}-release_MNWZZGSSFQ6VCLBDH7CZBEZC4K35QQBSLOW5DSYHF3DFFDX2OOZQ', - ]} - assert expected_images["name"] == registry.images["name"] \ - and set(expected_images["tags"]) == set(registry.images["tags"]) diff --git a/test/mock_cast.py b/test/mock_cast.py index c6fc304..b6ca022 100644 --- a/test/mock_cast.py +++ b/test/mock_cast.py @@ -1,4 +1,7 @@ -from typing import cast, Any +from typing import ( + Any, + cast, +) from unittest.mock import Mock diff --git a/test/unit_tests/__init__.py b/test/unit/__init__.py similarity index 100% rename from test/unit_tests/__init__.py rename to test/unit/__init__.py diff --git a/test/unit/ci_calls.py b/test/unit/ci_calls.py new file mode 100644 index 0000000..7fa438f --- /dev/null +++ b/test/unit/ci_calls.py @@ -0,0 +1,81 @@ +from test.unit.test_env import test_env +from unittest.mock import call + + +def prepare(): + return call.prepare() + + +def build_ci_call(force_rebuild: bool): + return call.build( + flavor_path=("flavors/TEST_FLAVOR",), + rebuild=force_rebuild, + build_docker_repository=test_env.docker_build_repo, + commit_sha=test_env.commit_sha, + docker_user=test_env.docker_user, + docker_password=test_env.docker_pwd, + test_container_folder="test_container", + ) + + +def build_release_call(): + return call.build( + flavor_path=("flavors/TEST_FLAVOR",), + rebuild=True, + build_docker_repository=None, + commit_sha="", + docker_user=None, + docker_password=None, + test_container_folder="test_container", + ) + + +def run_db_test_call(): + return call.execute_tests( + flavor_path=("flavors/TEST_FLAVOR",), + docker_user=test_env.docker_user, + docker_password=test_env.docker_pwd, + test_container_folder="test_container", + ) + + +def security_scan_call(): + return call.run_security_scan(flavor_path=("flavors/TEST_FLAVOR",)) + + +def push_build_repo_with_sha_call(): + return call.push( + flavor_path=("flavors/TEST_FLAVOR",), + target_docker_repository=test_env.docker_build_repo, + target_docker_tag_prefix=test_env.commit_sha, + docker_user=test_env.docker_user, + docker_password=test_env.docker_pwd, + ) + + +def push_build_repo_without_sha_call(): + return call.push( + flavor_path=("flavors/TEST_FLAVOR",), + target_docker_repository=test_env.docker_build_repo, + target_docker_tag_prefix="", + docker_user=test_env.docker_user, + docker_password=test_env.docker_pwd, + ) + + +def push_release_repo(): + return call.push( + flavor_path=("flavors/TEST_FLAVOR",), + target_docker_repository=test_env.docker_release_repo, + target_docker_tag_prefix="", + docker_user=test_env.docker_user, + docker_password=test_env.docker_pwd, + ) + + +def release_upload(): + return call.release_upload( + flavor_path=("flavors/TEST_FLAVOR",), + source_repo_url="https://github.com/test_source_repo_url", + release_id=123, + ) diff --git a/test/unit_tests/test_asset_uploader.py b/test/unit/test_asset_uploader.py similarity index 69% rename from test/unit_tests/test_asset_uploader.py rename to test/unit/test_asset_uploader.py index 349f78b..3960472 100644 --- 
a/test/unit_tests/test_asset_uploader.py +++ b/test/unit/test_asset_uploader.py @@ -1,17 +1,25 @@ from pathlib import Path from tempfile import TemporaryDirectory +from test.mock_cast import mock_cast from typing import Union -from unittest.mock import create_autospec, MagicMock +from unittest.mock import ( + MagicMock, + create_autospec, +) from exasol_script_languages_container_ci.lib.asset_uploader import AssetUploader -from exasol_script_languages_container_ci.lib.github_release_asset_uploader import GithubReleaseAssetUploader -from test.mock_cast import mock_cast +from exasol_script_languages_container_ci.lib.github_release_asset_uploader import ( + GithubReleaseAssetUploader, +) -def test(): - github_release_asset_uploader_mock: Union[MagicMock, GithubReleaseAssetUploader] = \ +def test() -> None: + github_release_asset_uploader_mock: Union[MagicMock, GithubReleaseAssetUploader] = ( create_autospec(GithubReleaseAssetUploader) - asset_uploader = AssetUploader(release_asset_uploader=github_release_asset_uploader_mock) + ) + asset_uploader = AssetUploader( + release_asset_uploader=github_release_asset_uploader_mock + ) with TemporaryDirectory() as temp_dir: artifact_path = Path(temp_dir) release_artifact = artifact_path / "test_artifact.txt" @@ -23,12 +31,12 @@ def test(): file_suffix=".txt", content_type="test_content_type", label_prefix="test_label_prefix", - artifact_path=temp_dir + artifact_path=temp_dir, ) mock_cast(github_release_asset_uploader_mock.upload).assert_called_once_with( archive_path=str(release_artifact), - content_type='test_content_type', - label='test_label_prefix test_artifact', + content_type="test_content_type", + label="test_label_prefix test_artifact", release_id=123, - repo_id='test_repo_id' + repo_id="test_repo_id", ) diff --git a/test/unit/test_ci.py b/test/unit/test_ci.py new file mode 100644 index 0000000..e1c1a0e --- /dev/null +++ b/test/unit/test_ci.py @@ -0,0 +1,106 @@ +from test.unit import ci_calls +from test.unit.test_env import test_env +from typing import Union +from unittest.mock import Mock + +import pytest + +from exasol_script_languages_container_ci.lib.ci import ci +from exasol_script_languages_container_ci.lib.ci_build import CIBuild +from exasol_script_languages_container_ci.lib.ci_push import CIPush +from exasol_script_languages_container_ci.lib.ci_security_scan import CISecurityScan +from exasol_script_languages_container_ci.lib.ci_test import CIExecuteTest + +# Testdata contain tuples of (branch, list(calls to CICommands)) +# The goal is to test that for specific branches the correct list of calls (with expected arguments) is passed to the CICommands +testdata_ci = [ + ( + "refs/heads/feature/test_branch", + [ + ci_calls.prepare(), + ci_calls.build_ci_call(force_rebuild=False), + ci_calls.run_db_test_call(), + ci_calls.security_scan_call(), + ci_calls.push_build_repo_with_sha_call(), + ci_calls.push_build_repo_without_sha_call(), + ], + ), + ( + "refs/heads/rebuild/feature/test_branch", + [ + ci_calls.prepare(), + ci_calls.build_ci_call(force_rebuild=True), + ci_calls.run_db_test_call(), + ci_calls.security_scan_call(), + ci_calls.push_build_repo_with_sha_call(), + ci_calls.push_build_repo_without_sha_call(), + ], + ), + ( + "refs/heads/master", + [ + ci_calls.prepare(), + ci_calls.build_ci_call(force_rebuild=True), + ci_calls.run_db_test_call(), + ci_calls.security_scan_call(), + ci_calls.push_build_repo_with_sha_call(), + ci_calls.push_build_repo_without_sha_call(), + ci_calls.push_release_repo(), + ], + ), + ( + "refs/heads/main", + [ 
+ ci_calls.prepare(), + ci_calls.build_ci_call(force_rebuild=True), + ci_calls.run_db_test_call(), + ci_calls.security_scan_call(), + ci_calls.push_build_repo_with_sha_call(), + ci_calls.push_build_repo_without_sha_call(), + ci_calls.push_release_repo(), + ], + ), + ( + "refs/heads/develop", + [ + ci_calls.prepare(), + ci_calls.build_ci_call(force_rebuild=True), + ci_calls.run_db_test_call(), + ci_calls.security_scan_call(), + ci_calls.push_build_repo_with_sha_call(), + ci_calls.push_build_repo_without_sha_call(), + ], + ), +] + + +@pytest.mark.parametrize("branch,expected_calls", testdata_ci) +def test_branches(branch, git_access_mock, expected_calls, build_config): + """ + Test that for specific branches the correct steps are executed: + 1. Build Image (force_rebuild = true/false) + 2. Run db tests + 3. Security scan + 4. Push to docker build repo (with and without sha) + 5. Optionally: Push to docker release repo + """ + ci_commands_mock: Union[CIBuild, CIPush, CIExecuteTest, CISecurityScan, Mock] = ( + Mock() + ) + ci( + flavor="TEST_FLAVOR", + branch_name=branch, + docker_user=test_env.docker_user, + docker_password=test_env.docker_pwd, + docker_build_repository=test_env.docker_build_repo, + docker_release_repository=test_env.docker_release_repo, + commit_sha=test_env.commit_sha, + build_config=build_config, + git_access=git_access_mock, + ci_build=ci_commands_mock, + ci_push=ci_commands_mock, + ci_execute_tests=ci_commands_mock, + ci_security_scan=ci_commands_mock, + ci_prepare=ci_commands_mock, + ) + assert ci_commands_mock.mock_calls == expected_calls diff --git a/test/unit_tests/test_ci_prepare.py b/test/unit/test_ci_prepare.py similarity index 83% rename from test/unit_tests/test_ci_prepare.py rename to test/unit/test_ci_prepare.py index e889cef..838e5ee 100644 --- a/test/unit_tests/test_ci_prepare.py +++ b/test/unit/test_ci_prepare.py @@ -3,14 +3,13 @@ from unittest import mock import pytest -from exasol_integration_test_docker_environment.cli.options.system_options import DEFAULT_OUTPUT_DIRECTORY +from exasol_integration_test_docker_environment.cli.options.system_options import ( + DEFAULT_OUTPUT_DIRECTORY, +) from exasol_integration_test_docker_environment.lib.base import luigi_log_config from exasol_script_languages_container_ci.lib.ci_prepare import CIPrepare - - - EXPECTED_LOG_PARENT_DIRECTORY = Path(DEFAULT_OUTPUT_DIRECTORY) / "jobs" / "logs" EXPECTED_LOG_FILE = EXPECTED_LOG_PARENT_DIRECTORY / "main.log" @@ -20,7 +19,9 @@ def test_ci_prepare_log_environment_variable_is_set(mock_settings_env_vars): assert luigi_log_config.LOG_ENV_VARIABLE_NAME in os.environ -def test_ci_prepare_log_environment_variable_is_set_to_the_correct_path(mock_settings_env_vars): +def test_ci_prepare_log_environment_variable_is_set_to_the_correct_path( + mock_settings_env_vars, +): CIPrepare().prepare() expected_path = str(EXPECTED_LOG_FILE.absolute()) assert os.environ[luigi_log_config.LOG_ENV_VARIABLE_NAME] == expected_path @@ -40,7 +41,10 @@ def test_ci_prepare_log_path_file_doesnt_exist(mock_settings_env_vars): def test_ci_prepare_log_path_parent_directory_exist(mock_settings_env_vars): EXPECTED_LOG_PARENT_DIRECTORY.mkdir(parents=True) CIPrepare().prepare() - assert Path(os.environ[luigi_log_config.LOG_ENV_VARIABLE_NAME]).parent == EXPECTED_LOG_PARENT_DIRECTORY.absolute() + assert ( + Path(os.environ[luigi_log_config.LOG_ENV_VARIABLE_NAME]).parent + == EXPECTED_LOG_PARENT_DIRECTORY.absolute() + ) def test_ci_prepare_log_path_file_exists(mock_settings_env_vars): @@ -51,4 +55,6 @@ def
test_ci_prepare_log_path_file_exists(mock_settings_env_vars): CIPrepare().prepare() actual_path = Path(os.environ[luigi_log_config.LOG_ENV_VARIABLE_NAME]) actual_value = actual_path.read_text() - assert actual_value == expected_value and actual_path == EXPECTED_LOG_FILE.absolute() + assert ( + actual_value == expected_value and actual_path == EXPECTED_LOG_FILE.absolute() + ) diff --git a/test/unit_tests/test_ci_test.py b/test/unit/test_ci_test.py similarity index 55% rename from test/unit_tests/test_ci_test.py rename to test/unit/test_ci_test.py index 1114905..ddec281 100644 --- a/test/unit_tests/test_ci_test.py +++ b/test/unit/test_ci_test.py @@ -1,14 +1,23 @@ from contextlib import suppress from pathlib import Path +from test.mock_cast import mock_cast from typing import Union -from unittest.mock import call, create_autospec, MagicMock +from unittest.mock import ( + MagicMock, + call, + create_autospec, +) import pytest from exasol.slc.models.test_result import AllTestsResult -from exasol_script_languages_container_ci.lib.ci_step_output_printer import CIStepOutputPrinterProtocol -from exasol_script_languages_container_ci.lib.ci_test import CIExecuteTest, DBTestRunnerProtocol -from test.mock_cast import mock_cast +from exasol_script_languages_container_ci.lib.ci_step_output_printer import ( + CIStepOutputPrinterProtocol, +) +from exasol_script_languages_container_ci.lib.ci_test import ( + CIExecuteTest, + DBTestRunnerProtocol, +) class BaseCIExecuteTest: @@ -24,13 +33,14 @@ def base_setup(self, db_test_runner): self.flavor_path = "test_flavor" self.test_container_folder = "test_container_folder" self.ci_execute_test = CIExecuteTest( - printer=self.printer_mock, - db_test_runner=db_test_runner + printer=self.printer_mock, db_test_runner=db_test_runner ) @staticmethod def create_all_tests_result_mock(tests_are_ok: bool): - all_tests_result: Union[MagicMock, AllTestsResult] = create_autospec(AllTestsResult) + all_tests_result: Union[MagicMock, AllTestsResult] = create_autospec( + AllTestsResult + ) all_tests_result.tests_are_ok = tests_are_ok all_tests_result.command_line_output_path = create_autospec(Path) return all_tests_result @@ -46,12 +56,24 @@ def execute_tests(self): @pytest.fixture() def run_db_tests_calls(self): return [ - call.run(flavor_path=(self.flavor_path,), test_folder=(), release_goal=('release',), workers=7, - docker_username=None, docker_password=None, - test_container_folder=self.test_container_folder), - call.run(flavor_path=(self.flavor_path,), workers=7, test_folder=('test/linker_namespace_sanity',), - release_goal=('base_test_build_run',), docker_username=None, docker_password=None, - test_container_folder=self.test_container_folder) + call.run( + flavor_path=(self.flavor_path,), + test_folder=(), + release_goal=("release",), + workers=7, + docker_username=None, + docker_password=None, + test_container_folder=self.test_container_folder, + ), + call.run( + flavor_path=(self.flavor_path,), + workers=7, + test_folder=("test/linker_namespace_sanity",), + release_goal=("base_test_build_run",), + docker_username=None, + docker_password=None, + test_container_folder=self.test_container_folder, + ), ] @@ -59,17 +81,26 @@ class TestSuccessfulFlavor(BaseCIExecuteTest): @pytest.fixture def complete_setup(self, base_setup): - self.db_tests_all_tests_result = self.create_all_tests_result_mock(tests_are_ok=True) - self.linker_namespace_tests_all_tests_result = self.create_all_tests_result_mock(tests_are_ok=True) - mock_cast(self.db_test_runner_mock.run).side_effect = 
[self.db_tests_all_tests_result, - self.linker_namespace_tests_all_tests_result] + self.db_tests_all_tests_result = self.create_all_tests_result_mock( + tests_are_ok=True + ) + self.linker_namespace_tests_all_tests_result = ( + self.create_all_tests_result_mock(tests_are_ok=True) + ) + mock_cast(self.db_test_runner_mock.run).side_effect = [ + self.db_tests_all_tests_result, + self.linker_namespace_tests_all_tests_result, + ] def test_ci_step_output_printer_call(self, complete_setup): self.execute_tests() assert self.printer_mock.mock_calls == [ call.print_file(self.db_tests_all_tests_result.command_line_output_path), - call.print_file(self.linker_namespace_tests_all_tests_result.command_line_output_path), - call.print_exasol_docker_images()] + call.print_file( + self.linker_namespace_tests_all_tests_result.command_line_output_path + ), + call.print_exasol_docker_images(), + ] def test_db_test_runner_calls(self, complete_setup, run_db_tests_calls): self.execute_tests() @@ -78,18 +109,18 @@ def test_db_test_runner_calls(self, complete_setup, run_db_tests_calls): class TestFailingRunDBTestFlavor(BaseCIExecuteTest): - @pytest.fixture( - params=[ - (False, True), - (True, False), - (False, False) - ] - ) + @pytest.fixture(params=[(False, True), (True, False), (False, False)]) def complete_setup(self, base_setup, request): - self.db_tests_all_tests_result = self.create_all_tests_result_mock(tests_are_ok=request.param[0]) - self.linker_namespace_tests_all_tests_result = self.create_all_tests_result_mock(tests_are_ok=request.param[1]) - mock_cast(self.db_test_runner_mock.run).side_effect = [self.db_tests_all_tests_result, - self.linker_namespace_tests_all_tests_result] + self.db_tests_all_tests_result = self.create_all_tests_result_mock( + tests_are_ok=request.param[0] + ) + self.linker_namespace_tests_all_tests_result = ( + self.create_all_tests_result_mock(tests_are_ok=request.param[1]) + ) + mock_cast(self.db_test_runner_mock.run).side_effect = [ + self.db_tests_all_tests_result, + self.linker_namespace_tests_all_tests_result, + ] @pytest.fixture def run_suppress_exception(self, complete_setup): @@ -103,8 +134,11 @@ def test_raises(self, complete_setup): def test_ci_step_output_printer_call(self, run_suppress_exception): assert self.printer_mock.mock_calls == [ call.print_file(self.db_tests_all_tests_result.command_line_output_path), - call.print_file(self.linker_namespace_tests_all_tests_result.command_line_output_path), - call.print_exasol_docker_images()] + call.print_file( + self.linker_namespace_tests_all_tests_result.command_line_output_path + ), + call.print_exasol_docker_images(), + ] def test_db_test_runner_calls(self, run_suppress_exception, run_db_tests_calls): assert self.db_test_runner_mock.mock_calls == run_db_tests_calls diff --git a/test/unit_tests/test_config_data_model.py b/test/unit/test_config_data_model.py similarity index 62% rename from test/unit_tests/test_config_data_model.py rename to test/unit/test_config_data_model.py index 9ac66ad..dfea23d 100644 --- a/test/unit_tests/test_config_data_model.py +++ b/test/unit/test_config_data_model.py @@ -1,25 +1,18 @@ - - import pytest -from exasol_script_languages_container_ci.lib.config.config_data_model import Config, Build, Ignore, Release +from exasol_script_languages_container_ci.lib.config.config_data_model import ( + Build, + Config, + Ignore, + Release, +) @pytest.fixture def expected_config() -> Config: config = Config( - build=Build( - ignore=Ignore( - paths=[ - "a/b/c", - "e/f/g" - ] - ), - base_branch="" - ), - 
release=Release( - timeout_in_minutes=1 - ) + build=Build(ignore=Ignore(paths=["a/b/c", "e/f/g"]), base_branch=""), + release=Release(timeout_in_minutes=1), ) return config diff --git a/test/unit_tests/test_config_data_model_generator.py b/test/unit/test_config_data_model_generator.py similarity index 76% rename from test/unit_tests/test_config_data_model_generator.py rename to test/unit/test_config_data_model_generator.py index 9825f77..2aad540 100644 --- a/test/unit_tests/test_config_data_model_generator.py +++ b/test/unit/test_config_data_model_generator.py @@ -1,14 +1,18 @@ import sys -from exasol_script_languages_container_ci.lib.config.data_model_generator import CONFIG_DATA_MODEL_FILE_NAME, \ - generate_config_data_model +from exasol_script_languages_container_ci.lib.config.data_model_generator import ( + CONFIG_DATA_MODEL_FILE_NAME, + generate_config_data_model, +) def test_loading_generated_module(tmp_path): config_data_model_file = tmp_path / CONFIG_DATA_MODEL_FILE_NAME generate_config_data_model(config_data_model_file) module = load_module(config_data_model_file) - assert {"Config", "Build", "Release", "Ignore", "Release"}.issubset(module.__dict__.keys()) + assert {"Config", "Build", "Release", "Ignore", "Release"}.issubset( + module.__dict__.keys() + ) def test_using_generated_module(tmp_path, expected_json_config): @@ -23,23 +27,16 @@ def test_using_generated_module(tmp_path, expected_json_config): def create_config(module): config = module.Config( build=module.Build( - ignore=module.Ignore( - paths=[ - "a/b/c", - "e/f/g" - ] - ), - base_branch="" + ignore=module.Ignore(paths=["a/b/c", "e/f/g"]), base_branch="" ), - release=module.Release( - timeout_in_minutes=1 - ) + release=module.Release(timeout_in_minutes=1), ) return config def load_module(config_data_model_file): import importlib.util + module_name = "test_create_model_can_be_imported" spec = importlib.util.spec_from_file_location(module_name, config_data_model_file) module = importlib.util.module_from_spec(spec) diff --git a/test/unit_tests/test_env.py b/test/unit/test_env.py similarity index 99% rename from test/unit_tests/test_env.py rename to test/unit/test_env.py index b620ef4..1c623d6 100644 --- a/test/unit_tests/test_env.py +++ b/test/unit/test_env.py @@ -1,4 +1,3 @@ - class TestEnv: docker_user = "test_docker_user" docker_pwd = "test_docker_pwd" diff --git a/test/unit_tests/test_ignore_folders.py b/test/unit/test_ignore_folders.py similarity index 50% rename from test/unit_tests/test_ignore_folders.py rename to test/unit/test_ignore_folders.py index eaf2b2e..e670062 100644 --- a/test/unit_tests/test_ignore_folders.py +++ b/test/unit/test_ignore_folders.py @@ -2,13 +2,18 @@ from pathlib import Path from typing import List +import git import pytest from _pytest.tmpdir import TempPathFactory from exasol_script_languages_container_ci.lib.ci import check_if_need_to_build -from exasol_script_languages_container_ci.lib.config.config_data_model import Config, Build, Release, Ignore +from exasol_script_languages_container_ci.lib.config.config_data_model import ( + Build, + Config, + Ignore, + Release, +) from exasol_script_languages_container_ci.lib.git_access import GitAccess -import git def commit_base(repo: git.Repo, repo_path: Path) -> None: @@ -16,7 +21,7 @@ def commit_base(repo: git.Repo, repo_path: Path) -> None: Create dummy commit on base branch with "something" """ (repo_path / "something").parent.mkdir(parents=True, exist_ok=True) - open(repo_path / "something", 'w').close() + open(repo_path / "something", 
"w").close() repo.index.add([str(repo_path / "something")]) repo.index.commit("Base commit") assert repo.active_branch.name == "master" or repo.active_branch.name == "main" @@ -24,8 +29,13 @@ def commit_base(repo: git.Repo, repo_path: Path) -> None: repo.active_branch.rename("master") -def commit_files(branch_name: str, repo: git.Repo, repo_path: Path, - files_to_commit: List[List[str]], commit_message: str) -> None: +def commit_files( + branch_name: str, + repo: git.Repo, + repo_path: Path, + files_to_commit: List[List[str]], + commit_message: str, +) -> None: """ Create empty given files (param files_to_commit) and commit them to a "Dummy commit" """ @@ -35,15 +45,17 @@ def commit_files(branch_name: str, repo: git.Repo, repo_path: Path, for file_list in files_to_commit: for file in file_list: (repo_path / file).parent.mkdir(parents=True, exist_ok=True) - open(repo_path / file, 'w').close() + open(repo_path / file, "w").close() repo.index.add([str(repo_path / file)]) repo.index.commit(commit_message) @pytest.fixture def build_config() -> Config: - config = Config(build=Build(base_branch="master", ignore=Ignore(paths=["doc", "githooks"])), - release=Release(timeout_in_minutes=10)) + config = Config( + build=Build(base_branch="master", ignore=Ignore(paths=["doc", "githooks"])), + release=Release(timeout_in_minutes=10), + ) return config @@ -51,44 +63,102 @@ def build_config() -> Config: TEST_DATA = [ # If the last commit contains files not included in the ignore-path list, the build must run - ("last_commit_not_ignore_path_build_must_run", "refs/heads/feature_branch", - [["flavors/flavor_abc/build_steps.py", "doc/something", "src/udfclient.cpp"]], "message", True), + ( + "last_commit_not_ignore_path_build_must_run", + "refs/heads/feature_branch", + [["flavors/flavor_abc/build_steps.py", "doc/something", "src/udfclient.cpp"]], + "message", + True, + ), # If there are 2 commits, and the last only contains files in the ignore-list, but the first contains # files not included in the ignore-path list, the build must run - ("commit_before_last_commit_not_ignore_path_build_must_run", "refs/heads/feature_branch", - [["flavors/flavor_abc/build_steps.py", "doc/something", "src/udfclient.cpp"], ["doc/something"]], "message", True), + ( + "commit_before_last_commit_not_ignore_path_build_must_run", + "refs/heads/feature_branch", + [ + ["flavors/flavor_abc/build_steps.py", "doc/something", "src/udfclient.cpp"], + ["doc/something"], + ], + "message", + True, + ), # If last commit(s) contain only files included in the ignore-path-list or another flavor the build must not run - ("last_commit_ignore_path_or_another_flavor_build_must_not_run", "refs/heads/feature_branch", - [["flavors/flavor_abc/build_steps.py", "doc/something"]], "message", False), + ( + "last_commit_ignore_path_or_another_flavor_build_must_not_run", + "refs/heads/feature_branch", + [["flavors/flavor_abc/build_steps.py", "doc/something"]], + "message", + False, + ), # If last commit message contains "[rebuild]" the build should always trigger - ("rebuild_in_last_commit_msg_build_must_run", "refs/heads/feature_branch", - [["flavors/flavor_abc/build_steps.py", "doc/something"]], "message [rebuild]", True), + ( + "rebuild_in_last_commit_msg_build_must_run", + "refs/heads/feature_branch", + [["flavors/flavor_abc/build_steps.py", "doc/something"]], + "message [rebuild]", + True, + ), # Affected files on current flavor should trigger a build - ("changes_in_current_flavor_build_must_run", "refs/heads/feature_branch", - 
[[f"flavors/{TEST_FLAVOR}/build_steps.py", "doc/something"]], "message", True), + ( + "changes_in_current_flavor_build_must_run", + "refs/heads/feature_branch", + [[f"flavors/{TEST_FLAVOR}/build_steps.py", "doc/something"]], + "message", + True, + ), # If there are 2 commits, and the last only contains files in the ignore-list, but the first contains # files of the current flavor, the build must run - ("changes_in_current_flavor_before_last_commit_build_must_run", "refs/heads/feature_branch", - [[f"flavors/{TEST_FLAVOR}/build_steps.py"], ["flavors/flavor_abc/build_steps.py"]], "message", True), - ("develop_must_always_run", "refs/heads/develop", [["doc/something"]], "message", True), + ( + "changes_in_current_flavor_before_last_commit_build_must_run", + "refs/heads/feature_branch", + [ + [f"flavors/{TEST_FLAVOR}/build_steps.py"], + ["flavors/flavor_abc/build_steps.py"], + ], + "message", + True, + ), + ( + "develop_must_always_run", + "refs/heads/develop", + [["doc/something"]], + "message", + True, + ), # Even if folder should be ignored, in case of develop branch we always expect to run - ("master_must_always_run", "refs/heads/master", [["doc/something"]], "message", True), + ( + "master_must_always_run", + "refs/heads/master", + [["doc/something"]], + "message", + True, + ), # Even if folder should be ignored, in case of master branch we always expect to run ("main_must_always_run", "refs/heads/main", [["doc/something"]], "message", True), # Even if folder should be ignored, in case of main branch we always expect to run - ("rebuild_must_always_run", "refs/heads/rebuild/feature_branch", [["doc/something"]], "message", True), + ( + "rebuild_must_always_run", + "refs/heads/rebuild/feature_branch", + [["doc/something"]], + "message", + True, + ), # Even if folder should be ignored, in case of rebuild/* branch we always expect to run ] -@pytest.mark.parametrize("test_name, branch_name, files_to_commit,commit_message, expected_result", TEST_DATA) -def test_ignore_folder_should_run_ci(test_name: str, - branch_name: str, - tmp_test_dir, - build_config: Config, - files_to_commit, - commit_message: str, - expected_result: bool): +@pytest.mark.parametrize( + "test_name, branch_name, files_to_commit,commit_message, expected_result", TEST_DATA +) +def test_ignore_folder_should_run_ci( + test_name: str, + branch_name: str, + tmp_test_dir, + build_config: Config, + files_to_commit, + commit_message: str, + expected_result: bool, +): """ This test creates a temporary git repository, commits the given file list (files_for_commit), then runs ci.check_if_need_to_build() and checks if it returned the expected result @@ -96,4 +166,7 @@ def test_ignore_folder_should_run_ci(test_name: str, repo_path = Path(tmp_test_dir) tmp_repo = git.Repo.init(repo_path) commit_files(branch_name, tmp_repo, repo_path, files_to_commit, commit_message) - assert check_if_need_to_build(branch_name, build_config, TEST_FLAVOR, GitAccess()) == expected_result + assert ( + check_if_need_to_build(branch_name, build_config, TEST_FLAVOR, GitAccess()) + == expected_result + ) diff --git a/test/unit/test_release.py b/test/unit/test_release.py new file mode 100644 index 0000000..e19a442 --- /dev/null +++ b/test/unit/test_release.py @@ -0,0 +1,76 @@ +from test.unit import ci_calls +from test.unit.test_env import test_env +from typing import Union +from unittest.mock import ( + MagicMock, + Mock, + create_autospec, +) + +import pytest + +from exasol_script_languages_container_ci.lib.ci import ci +from 
exasol_script_languages_container_ci.lib.ci_build import CIBuild +from exasol_script_languages_container_ci.lib.ci_push import CIPush +from exasol_script_languages_container_ci.lib.ci_security_scan import CISecurityScan +from exasol_script_languages_container_ci.lib.ci_test import CIExecuteTest +from exasol_script_languages_container_ci.lib.config.config_data_model import Config +from exasol_script_languages_container_ci.lib.release import release +from exasol_script_languages_container_ci.lib.release_uploader import ReleaseUploader + +# Testdata contain tuples of (dry_run, list(calls to CICommands)) +testdata_ci = [ + ( + True, + [ + ci_calls.prepare(), + ci_calls.build_release_call(), + ci_calls.run_db_test_call(), + ci_calls.security_scan_call(), + ci_calls.release_upload(), + ], + ), + ( + False, + [ + ci_calls.prepare(), + ci_calls.build_release_call(), + ci_calls.run_db_test_call(), + ci_calls.security_scan_call(), + ci_calls.push_release_repo(), + ci_calls.release_upload(), + ], + ), +] + + +@pytest.mark.parametrize("is_dry_run,expected_calls", testdata_ci) +def test(is_dry_run: bool, expected_calls, build_config: Config): + """ + Test that the correct steps are executed for the release: + 1. Build Image (force_rebuild = true/false) + 2. Run db tests + 3. Security scan + 4. Push to docker release repo (only without dry-run) + 5. Upload release to GitHub + """ + ci_commands_mock: Union[ + CIBuild, CIPush, CIExecuteTest, CISecurityScan, ReleaseUploader, Mock + ] = Mock() + release( + flavor="TEST_FLAVOR", + docker_user=test_env.docker_user, + docker_password=test_env.docker_pwd, + docker_release_repository=test_env.docker_release_repo, + source_repo_url="https://github.com/test_source_repo_url", + build_config=build_config, + release_id=123, + is_dry_run=is_dry_run, + release_uploader=ci_commands_mock, # type: ignore + ci_build=ci_commands_mock, # type: ignore + ci_push=ci_commands_mock, # type: ignore + ci_execute_tests=ci_commands_mock, # type: ignore + ci_security_scan=ci_commands_mock, # type: ignore + ci_prepare=ci_commands_mock, # type: ignore + ) + assert ci_commands_mock.mock_calls == expected_calls # type: ignore diff --git a/test/unit/test_release_uploader.py b/test/unit/test_release_uploader.py new file mode 100644 index 0000000..7774fc5 --- /dev/null +++ b/test/unit/test_release_uploader.py @@ -0,0 +1,51 @@ +from test.mock_cast import mock_cast +from typing import Union +from unittest.mock import ( + MagicMock, + Mock, + call, + create_autospec, + patch, +) + +from exasol_script_languages_container_ci.lib.asset_uploader import AssetUploader +from exasol_script_languages_container_ci.lib.ci_export import CIExport +from exasol_script_languages_container_ci.lib.release_uploader import ReleaseUploader + + +@patch( + "exasol_script_languages_container_ci.lib.release_uploader.TemporaryDirectory", + autospec=True, +) +def test(temp_dir_mock): + asset_uploader_mock: Union[MagicMock, AssetUploader] = create_autospec( # type: ignore + AssetUploader + ) + ci_export_mock: Union[MagicMock, CIExport] = create_autospec(CIExport) # type: ignore + release_uploader = ReleaseUploader(asset_uploader_mock, ci_export_mock) + release_uploader.release_upload( + release_id=123, + flavor_path=("test_flavor_path",), + source_repo_url="https://github.com/test_source_repo_url", + ) + expected_artifact_path = temp_dir_mock().__enter__() + assert mock_cast(asset_uploader_mock.upload_assets).mock_calls == [ + call( + repo_id="test_source_repo_url", + release_id=123, + content_type="application/gzip", + 
artifact_path=expected_artifact_path, + file_suffix=".tar.gz", + label_prefix="Flavor", + ), + call( + repo_id="test_source_repo_url", + release_id=123, + content_type="text/plain", + artifact_path=expected_artifact_path, + file_suffix=".tar.gz.sha512sum", + label_prefix="Checksum", + ), + ] and mock_cast(ci_export_mock.export).mock_calls == [ + call(flavor_path=("test_flavor_path",), export_path=expected_artifact_path) + ] diff --git a/test/unit_tests/ci_calls.py b/test/unit_tests/ci_calls.py deleted file mode 100644 index 9a70505..0000000 --- a/test/unit_tests/ci_calls.py +++ /dev/null @@ -1,67 +0,0 @@ -from unittest.mock import call - -from test.unit_tests.test_env import test_env - - -def prepare(): - return call.prepare() - - -def build_ci_call(force_rebuild: bool): - return call.build(flavor_path=("flavors/TEST_FLAVOR",), - rebuild=force_rebuild, - build_docker_repository=test_env.docker_build_repo, - commit_sha=test_env.commit_sha, - docker_user=test_env.docker_user, - docker_password=test_env.docker_pwd, - test_container_folder='test_container') - -def build_release_call(): - return call.build(flavor_path=("flavors/TEST_FLAVOR",), - rebuild=True, - build_docker_repository=None, - commit_sha="", - docker_user=None, - docker_password=None, - test_container_folder='test_container') - - -def run_db_test_call(): - return call.execute_tests(flavor_path=("flavors/TEST_FLAVOR",), - docker_user=test_env.docker_user, - docker_password=test_env.docker_pwd, - test_container_folder='test_container') - - -def security_scan_call(): - return call.run_security_scan(flavor_path=("flavors/TEST_FLAVOR",)) - - -def push_build_repo_with_sha_call(): - return call.push(flavor_path=("flavors/TEST_FLAVOR",), - target_docker_repository=test_env.docker_build_repo, - target_docker_tag_prefix=test_env.commit_sha, - docker_user=test_env.docker_user, - docker_password=test_env.docker_pwd) - - -def push_build_repo_without_sha_call(): - return call.push(flavor_path=("flavors/TEST_FLAVOR",), - target_docker_repository=test_env.docker_build_repo, - target_docker_tag_prefix="", - docker_user=test_env.docker_user, - docker_password=test_env.docker_pwd) - - -def push_release_repo(): - return call.push(flavor_path=("flavors/TEST_FLAVOR",), - target_docker_repository=test_env.docker_release_repo, - target_docker_tag_prefix="", - docker_user=test_env.docker_user, - docker_password=test_env.docker_pwd) - - -def release_upload(): - return call.release_upload(flavor_path=('flavors/TEST_FLAVOR',), - source_repo_url='https://github.com/test_source_repo_url', - release_id=123) diff --git a/test/unit_tests/test_ci.py b/test/unit_tests/test_ci.py deleted file mode 100644 index 63a9255..0000000 --- a/test/unit_tests/test_ci.py +++ /dev/null @@ -1,91 +0,0 @@ -from typing import Union -from unittest.mock import Mock - -import pytest - -from exasol_script_languages_container_ci.lib.ci import ci -from exasol_script_languages_container_ci.lib.ci_build import CIBuild -from exasol_script_languages_container_ci.lib.ci_push import CIPush -from exasol_script_languages_container_ci.lib.ci_security_scan import CISecurityScan -from exasol_script_languages_container_ci.lib.ci_test import CIExecuteTest -from test.unit_tests import ci_calls -from test.unit_tests.test_env import test_env - -# Testdata contain tuples of (branch, list(calls to CICommands)) -# The goal is to test that for specific branches the correct list of calls (with expected arguments) is passed to the CICommands -testdata_ci = [ - ("refs/heads/feature/test_branch", [ - 
ci_calls.prepare(), - ci_calls.build_ci_call(force_rebuild=False), - ci_calls.run_db_test_call(), - ci_calls.security_scan_call(), - ci_calls.push_build_repo_with_sha_call(), - ci_calls.push_build_repo_without_sha_call()] - ), - ("refs/heads/rebuild/feature/test_branch", [ - ci_calls.prepare(), - ci_calls.build_ci_call(force_rebuild=True), - ci_calls.run_db_test_call(), - ci_calls.security_scan_call(), - ci_calls.push_build_repo_with_sha_call(), - ci_calls.push_build_repo_without_sha_call()] - ), - ("refs/heads/master", [ - ci_calls.prepare(), - ci_calls.build_ci_call(force_rebuild=True), - ci_calls.run_db_test_call(), - ci_calls.security_scan_call(), - ci_calls.push_build_repo_with_sha_call(), - ci_calls.push_build_repo_without_sha_call(), - ci_calls.push_release_repo() - ] - ), - ("refs/heads/main", [ - ci_calls.prepare(), - ci_calls.build_ci_call(force_rebuild=True), - ci_calls.run_db_test_call(), - ci_calls.security_scan_call(), - ci_calls.push_build_repo_with_sha_call(), - ci_calls.push_build_repo_without_sha_call(), - ci_calls.push_release_repo() - ] - ), - ("refs/heads/develop", [ - ci_calls.prepare(), - ci_calls.build_ci_call(force_rebuild=True), - ci_calls.run_db_test_call(), - ci_calls.security_scan_call(), - ci_calls.push_build_repo_with_sha_call(), - ci_calls.push_build_repo_without_sha_call() - ] - ), - -] - - -@pytest.mark.parametrize("branch,expected_calls", testdata_ci) -def test_branches(branch, git_access_mock, expected_calls, build_config): - """ - Test that on for specific branches the correct steps are executed: - 1. Build Image (force_rebuild = true/false) - 2. Run db tests - 3. Security scan - 4. Push to docker build repo (with and without sha) - 5. Optionally: Push to docker release repo - """ - ci_commands_mock: Union[CIBuild, CIPush, CIExecuteTest, CISecurityScan, Mock] = Mock() - ci(flavor="TEST_FLAVOR", - branch_name=branch, - docker_user=test_env.docker_user, - docker_password=test_env.docker_pwd, - docker_build_repository=test_env.docker_build_repo, - docker_release_repository=test_env.docker_release_repo, - commit_sha=test_env.commit_sha, - build_config=build_config, - git_access=git_access_mock, - ci_build=ci_commands_mock, - ci_push=ci_commands_mock, - ci_execute_tests=ci_commands_mock, - ci_security_scan=ci_commands_mock, - ci_prepare=ci_commands_mock) - assert ci_commands_mock.mock_calls == expected_calls diff --git a/test/unit_tests/test_release.py b/test/unit_tests/test_release.py deleted file mode 100644 index d97c7bc..0000000 --- a/test/unit_tests/test_release.py +++ /dev/null @@ -1,66 +0,0 @@ -from typing import Union -from unittest.mock import create_autospec, MagicMock, Mock - -import pytest - -from exasol_script_languages_container_ci.lib.ci import ci -from exasol_script_languages_container_ci.lib.ci_build import CIBuild -from exasol_script_languages_container_ci.lib.ci_push import CIPush -from exasol_script_languages_container_ci.lib.ci_security_scan import CISecurityScan -from exasol_script_languages_container_ci.lib.ci_test import CIExecuteTest -from exasol_script_languages_container_ci.lib.config.config_data_model import Config -from exasol_script_languages_container_ci.lib.release import release -from exasol_script_languages_container_ci.lib.release_uploader import ReleaseUploader - -from test.unit_tests import ci_calls - -from test.unit_tests.test_env import test_env - -# Testdata contain tuples of (dry_run, list(calls to CICommands)) -testdata_ci = [ - (True, [ - ci_calls.prepare(), - ci_calls.build_release_call(), - 
ci_calls.run_db_test_call(), - ci_calls.security_scan_call(), - ci_calls.release_upload() - ] - ), - (False, [ - ci_calls.prepare(), - ci_calls.build_release_call(), - ci_calls.run_db_test_call(), - ci_calls.security_scan_call(), - ci_calls.push_release_repo(), - ci_calls.release_upload() - ] - ), -] - - -@pytest.mark.parametrize("is_dry_run,expected_calls", testdata_ci) -def test(is_dry_run: bool, expected_calls, build_config: Config): - """ - Test that the correct steps are executed for the release: - 1. Build Image (force_rebuild = true/false) - 2. Run db tests - 3. Security scan - 4. Push to docker release repo (only without dry-run) - 5. Upload release to GitHub - """ - ci_commands_mock: Union[CIBuild, CIPush, CIExecuteTest, CISecurityScan, ReleaseUploader, Mock] = Mock() - release(flavor="TEST_FLAVOR", - docker_user=test_env.docker_user, - docker_password=test_env.docker_pwd, - docker_release_repository=test_env.docker_release_repo, - source_repo_url="https://github.com/test_source_repo_url", - build_config=build_config, - release_id=123, - is_dry_run=is_dry_run, - release_uploader=ci_commands_mock, - ci_build=ci_commands_mock, - ci_push=ci_commands_mock, - ci_execute_tests=ci_commands_mock, - ci_security_scan=ci_commands_mock, - ci_prepare=ci_commands_mock) - assert ci_commands_mock.mock_calls == expected_calls diff --git a/test/unit_tests/test_release_uploader.py b/test/unit_tests/test_release_uploader.py deleted file mode 100644 index 25394b5..0000000 --- a/test/unit_tests/test_release_uploader.py +++ /dev/null @@ -1,44 +0,0 @@ -from typing import Union -from unittest.mock import create_autospec, MagicMock, call, patch, Mock - -from exasol_script_languages_container_ci.lib.asset_uploader import AssetUploader -from exasol_script_languages_container_ci.lib.ci_export import CIExport -from exasol_script_languages_container_ci.lib.release_uploader import ReleaseUploader -from test.mock_cast import mock_cast - - -@patch("exasol_script_languages_container_ci.lib.release_uploader.TemporaryDirectory", - autospec=True) -def test(temp_dir_mock): - asset_uploader_mock: Union[MagicMock, AssetUploader] = create_autospec(AssetUploader) - ci_export_mock: Union[MagicMock, CIExport] = create_autospec(CIExport) - release_uploader = ReleaseUploader(asset_uploader_mock, ci_export_mock) - release_uploader.release_upload( - release_id=123, - flavor_path=("test_flavor_path",), - source_repo_url="https://github.com/test_source_repo_url", - ) - expected_artifact_path = temp_dir_mock().__enter__() - assert \ - mock_cast(asset_uploader_mock.upload_assets).mock_calls == [ - call( - repo_id='test_source_repo_url', - release_id=123, - content_type='application/gzip', - artifact_path=expected_artifact_path, - file_suffix='.tar.gz', - label_prefix='Flavor' - ), - call( - repo_id='test_source_repo_url', - release_id=123, - content_type='text/plain', - artifact_path=expected_artifact_path, - file_suffix='.tar.gz.sha512sum', - label_prefix='Checksum' - ), - ] \ - and mock_cast(ci_export_mock.export).mock_calls == [ - call(flavor_path=('test_flavor_path',), - export_path=expected_artifact_path) - ]
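
As a reference for the rules exercised by TEST_DATA in test/unit/test_ignore_folders.py above, here is a minimal sketch of the build-trigger decision those test comments describe. It is not the library's check_if_need_to_build implementation; the name should_build, its parameters, and ALWAYS_BUILD_BRANCHES are hypothetical, the per-commit file lists are flattened into one iterable of changed files, and the rule set is taken only from the comments in the test data (develop/master/main and rebuild/* branches always build, a "[rebuild]" marker in the last commit message forces a build, and otherwise a build is only needed when some changed file is neither under an ignore path nor under a different flavor's directory).

from typing import Iterable, Sequence

# Branches that always trigger a build, per the comments in TEST_DATA (assumed set).
ALWAYS_BUILD_BRANCHES = {"refs/heads/develop", "refs/heads/master", "refs/heads/main"}


def should_build(
    branch_name: str,
    last_commit_message: str,
    changed_files: Iterable[str],
    ignore_paths: Sequence[str],
    flavor: str,
) -> bool:
    """Hypothetical restatement of the build-trigger rules; not the library code."""
    if branch_name in ALWAYS_BUILD_BRANCHES or branch_name.startswith("refs/heads/rebuild/"):
        return True  # protected and rebuild/* branches always build
    if "[rebuild]" in last_commit_message:
        return True  # explicit rebuild request in the commit message
    for path in changed_files:
        if any(path == p or path.startswith(p + "/") for p in ignore_paths):
            continue  # ignored paths (e.g. doc/, githooks/) never trigger a build
        if path.startswith("flavors/") and not path.startswith(f"flavors/{flavor}/"):
            continue  # changes confined to other flavors are irrelevant here
        return True  # anything else affects this flavor and requires a build
    return False


# Example, mirroring "last_commit_ignore_path_or_another_flavor_build_must_not_run":
# should_build("refs/heads/feature_branch", "message",
#              ["flavors/flavor_abc/build_steps.py", "doc/something"],
#              ["doc", "githooks"], "TEST_FLAVOR") -> False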