From 1508d1a5c3787943da8d84317328a53fd82fe870 Mon Sep 17 00:00:00 2001 From: Thomas Ubensee <34603111+tomuben@users.noreply.github.com> Date: Tue, 3 May 2022 11:26:36 -0300 Subject: [PATCH] #1: Implement codebuild deployment and buildspec generation (#2) Fixes #1 * Initial setup * Initial implementation for `deploy-ci-build`, `deploy-source-credentials`, `generate-buildspec`, `validate-ci-build`, `validate-source-credentials` Co-authored-by: Nicola Coretti --- .github/workflows/check_ci.yaml | 30 + .github/workflows/check_setup_py.yaml | 20 + .github/workflows/check_version.yaml | 21 + ...ase_droid_upload_github_release_assets.yml | 52 + .github/workflows/shellcheck.yaml | 15 + .gitignore | 2 + README.md | 8 +- doc/{ => changes}/changelog.md | 0 doc/{ => changes}/changes_0.1.0.md | 2 +- .../__init__.py | 8 + .../cli/__init__.py | 0 .../cli/cli.py | 6 + .../cli/commands/__init__.py | 0 .../cli/commands/deploy_ci_build.py | 32 + .../cli/commands/deploy_source_credentials.py | 35 + .../cli/commands/generate_buildspec.py | 33 + .../cli/commands/health.py | 44 + .../cli/commands/validate_ci_build.py | 26 + .../commands/validate_source_credentials.py | 27 + .../cli/common.py | 8 + .../cli/options/__init__.py | 0 .../cli/options/aws_options.py | 6 + .../cli/options/logging.py | 20 + .../health_check.py | 79 ++ .../lib/__init__.py | 0 .../lib/aws_access.py | 80 ++ .../lib/ci_build.py | 35 + .../lib/deployer.py | 212 +++ .../lib/render_template.py | 8 + .../lib/run_generate_buildspec.py | 81 ++ .../lib/source_credentials.py | 37 + .../main.py | 6 + .../templates/build_buildspec.yaml | 41 + .../templates/buildspec_batch_entry.yaml | 7 + .../templates/buildspec_hull.yaml | 9 + .../templates/config_schema.json | 15 + .../templates/slc_code_build.yaml | 130 ++ .../templates/slc_source_credential.yaml | 8 + githooks/install.sh | 44 + githooks/pre-commit | 11 + githooks/pre-push | 43 + githooks/prohibit_commit_to_main.sh | 8 + githooks/update_packaging.sh | 44 + poetry.lock | 
1146 +++++++++++++++++ pyproject.toml | 37 + scripts/build/build_release.sh | 12 + scripts/build/check_git_status.sh | 7 + scripts/build/check_release.py | 53 + scripts/build/check_release.sh | 11 + scripts/build/setup_poetry_env.sh | 10 + scripts/build/shellcheck.sh | 15 + setup.py | 41 + test/__init__.py | 0 test/cloudformation_validation.py | 19 + test/test_deploy_ci.py | 39 + test/test_deploy_source_credentials.py | 44 + test/test_generate_buildspec.py | 136 ++ user_guide/user_guide.md | 62 +- 58 files changed, 2922 insertions(+), 3 deletions(-) create mode 100644 .github/workflows/check_ci.yaml create mode 100644 .github/workflows/check_setup_py.yaml create mode 100644 .github/workflows/check_version.yaml create mode 100644 .github/workflows/release_droid_upload_github_release_assets.yml create mode 100644 .github/workflows/shellcheck.yaml rename doc/{ => changes}/changelog.md (100%) rename doc/{ => changes}/changes_0.1.0.md (75%) create mode 100644 exasol_script_languages_container_ci_setup/__init__.py create mode 100644 exasol_script_languages_container_ci_setup/cli/__init__.py create mode 100644 exasol_script_languages_container_ci_setup/cli/cli.py create mode 100644 exasol_script_languages_container_ci_setup/cli/commands/__init__.py create mode 100644 exasol_script_languages_container_ci_setup/cli/commands/deploy_ci_build.py create mode 100644 exasol_script_languages_container_ci_setup/cli/commands/deploy_source_credentials.py create mode 100644 exasol_script_languages_container_ci_setup/cli/commands/generate_buildspec.py create mode 100644 exasol_script_languages_container_ci_setup/cli/commands/health.py create mode 100644 exasol_script_languages_container_ci_setup/cli/commands/validate_ci_build.py create mode 100644 exasol_script_languages_container_ci_setup/cli/commands/validate_source_credentials.py create mode 100644 exasol_script_languages_container_ci_setup/cli/common.py create mode 100644 
exasol_script_languages_container_ci_setup/cli/options/__init__.py create mode 100644 exasol_script_languages_container_ci_setup/cli/options/aws_options.py create mode 100644 exasol_script_languages_container_ci_setup/cli/options/logging.py create mode 100644 exasol_script_languages_container_ci_setup/health_check.py create mode 100644 exasol_script_languages_container_ci_setup/lib/__init__.py create mode 100644 exasol_script_languages_container_ci_setup/lib/aws_access.py create mode 100644 exasol_script_languages_container_ci_setup/lib/ci_build.py create mode 100644 exasol_script_languages_container_ci_setup/lib/deployer.py create mode 100644 exasol_script_languages_container_ci_setup/lib/render_template.py create mode 100644 exasol_script_languages_container_ci_setup/lib/run_generate_buildspec.py create mode 100644 exasol_script_languages_container_ci_setup/lib/source_credentials.py create mode 100755 exasol_script_languages_container_ci_setup/main.py create mode 100644 exasol_script_languages_container_ci_setup/templates/build_buildspec.yaml create mode 100644 exasol_script_languages_container_ci_setup/templates/buildspec_batch_entry.yaml create mode 100644 exasol_script_languages_container_ci_setup/templates/buildspec_hull.yaml create mode 100644 exasol_script_languages_container_ci_setup/templates/config_schema.json create mode 100644 exasol_script_languages_container_ci_setup/templates/slc_code_build.yaml create mode 100644 exasol_script_languages_container_ci_setup/templates/slc_source_credential.yaml create mode 100755 githooks/install.sh create mode 100755 githooks/pre-commit create mode 100755 githooks/pre-push create mode 100755 githooks/prohibit_commit_to_main.sh create mode 100755 githooks/update_packaging.sh create mode 100644 poetry.lock create mode 100644 pyproject.toml create mode 100755 scripts/build/build_release.sh create mode 100755 scripts/build/check_git_status.sh create mode 100644 scripts/build/check_release.py create mode 100755 
scripts/build/check_release.sh create mode 100644 scripts/build/setup_poetry_env.sh create mode 100755 scripts/build/shellcheck.sh create mode 100644 setup.py create mode 100644 test/__init__.py create mode 100644 test/cloudformation_validation.py create mode 100644 test/test_deploy_ci.py create mode 100644 test/test_deploy_source_credentials.py create mode 100644 test/test_generate_buildspec.py diff --git a/.github/workflows/check_ci.yaml b/.github/workflows/check_ci.yaml new file mode 100644 index 0000000..e3d6121 --- /dev/null +++ b/.github/workflows/check_ci.yaml @@ -0,0 +1,30 @@ +name: Run Unit Tests + +on: + push: + branches: + - main + pull_request: + +jobs: + check_setup_py: + environment: AWS + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-python@v2 + with: + python-version: 3.8 + - uses: abatilo/actions-poetry@v2.0.0 + with: + poetry-version: 1.1.13 + - name: Setup poetry env + run: bash scripts/build/setup_poetry_env.sh "python3.8" + + - name: Run pytest + run: poetry run pytest + env: # Set the secret as an env variable + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_ACCESS_KEY_SECRET }} + AWS_DEFAULT_REGION: ${{ secrets.AWS_REGION }} diff --git a/.github/workflows/check_setup_py.yaml b/.github/workflows/check_setup_py.yaml new file mode 100644 index 0000000..52c654f --- /dev/null +++ b/.github/workflows/check_setup_py.yaml @@ -0,0 +1,20 @@ +name: Check packaging is up to date + +on: [push] + +jobs: + check_setup_py: + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-python@v2 + with: + python-version: 3.8 + - uses: abatilo/actions-poetry@v2.0.0 + with: + poetry-version: 1.1.13 + - name: Run packaging update + run: bash githooks/update_packaging.sh "python3.8" + - name: Check git status + run: bash scripts/build/check_git_status.sh diff --git a/.github/workflows/check_version.yaml b/.github/workflows/check_version.yaml new file 
mode 100644 index 0000000..c26644e --- /dev/null +++ b/.github/workflows/check_version.yaml @@ -0,0 +1,21 @@ +name: Check if versions are consistent + +on: [push] + +jobs: + check-version-numbers: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + with: + fetch-depth: 0 + - name: Setup Python 3.8 + uses: actions/setup-python@v2 + with: + python-version: 3.8 + - name: Install Poetry + uses: abatilo/actions-poetry@v2.0.0 + with: + poetry-version: 1.1.13 + - name: Check Release + run: ./scripts/build/check_release.sh "python3.8" diff --git a/.github/workflows/release_droid_upload_github_release_assets.yml b/.github/workflows/release_droid_upload_github_release_assets.yml new file mode 100644 index 0000000..b718f17 --- /dev/null +++ b/.github/workflows/release_droid_upload_github_release_assets.yml @@ -0,0 +1,52 @@ +name: Release Droid - Upload GitHub Release Assets + +on: + workflow_dispatch: + inputs: + upload_url: + description: 'Upload URL' + required: true + +jobs: + check-release: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + with: + fetch-depth: 0 + - name: Setup Python 3.8 for integration-test-docker-environment + uses: actions/setup-python@v2 + with: + python-version: 3.8 + - name: Install Poetry + uses: abatilo/actions-poetry@v2.0.0 + with: + poetry-version: 1.1.13 + - name: Build Release + run: ./scripts/build/check_release.sh "python3.8" + + upload: + needs: check-release + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + + - name: Setup Python 3.8 + uses: actions/setup-python@v2 + with: + python-version: 3.8 + - name: Install Poetry + uses: abatilo/actions-poetry@v2.0.0 + with: + poetry-version: 1.1.13 + + - name: Build Release + run: bash ./scripts/build/build_release.sh "python3.8" + - name: Generate sha512sum files + run: find ./dist -maxdepth 1 -type f -exec bash -c 'sha512sum {} > {}.sha512' \; + shell: bash + - name: Upload assets to the GitHub release draft + uses: 
shogo82148/actions-upload-release-asset@v1 + with: + upload_url: ${{ github.event.inputs.upload_url }} + asset_path: dist/* diff --git a/.github/workflows/shellcheck.yaml b/.github/workflows/shellcheck.yaml new file mode 100644 index 0000000..80cb2f6 --- /dev/null +++ b/.github/workflows/shellcheck.yaml @@ -0,0 +1,15 @@ +name: Check bash scripts + +on: + push: + branches: + - main + pull_request: + +jobs: + shellcheck: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - name: Run shellcheck + run: ./scripts/build/shellcheck.sh diff --git a/.gitignore b/.gitignore index e69de29..689264b 100644 --- a/.gitignore +++ b/.gitignore @@ -0,0 +1,2 @@ +.idea +.pytest_cache diff --git a/README.md b/README.md index 1670052..d688743 100644 --- a/README.md +++ b/README.md @@ -2,4 +2,10 @@ # Overview This project contains the AWS Cloudformation YAML files to deploy the AWS infrastructure for the ScriptLanguages-Container CI jobs. -Those YAML files are not static, but generated, based on the provided Script-Languages container flavors. \ No newline at end of file +Those YAML files are not static, but generated, based on the provided Script-Languages container flavors. + +Also it contains a command to create the Buildspec for AWS Code Build, based on the list of flavors. 
+ +## Links + +* [User Guide](./user_guide/user_guide.md) \ No newline at end of file diff --git a/doc/changelog.md b/doc/changes/changelog.md similarity index 100% rename from doc/changelog.md rename to doc/changes/changelog.md diff --git a/doc/changes_0.1.0.md b/doc/changes/changes_0.1.0.md similarity index 75% rename from doc/changes_0.1.0.md rename to doc/changes/changes_0.1.0.md index 101dcd6..0ad7070 100644 --- a/doc/changes_0.1.0.md +++ b/doc/changes/changes_0.1.0.md @@ -13,7 +13,7 @@ n/a ## Features / Enhancements -n/a + - #1: Implement codebuild deployment and buildspec generation ## Documentation diff --git a/exasol_script_languages_container_ci_setup/__init__.py b/exasol_script_languages_container_ci_setup/__init__.py new file mode 100644 index 0000000..51a7ec0 --- /dev/null +++ b/exasol_script_languages_container_ci_setup/__init__.py @@ -0,0 +1,8 @@ +from exasol_script_languages_container_ci_setup.cli.commands import ( + health, + generate_buildspec, + deploy_source_credentials, + deploy_ci_build, + validate_ci_build, + validate_source_credentials +) diff --git a/exasol_script_languages_container_ci_setup/cli/__init__.py b/exasol_script_languages_container_ci_setup/cli/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/exasol_script_languages_container_ci_setup/cli/cli.py b/exasol_script_languages_container_ci_setup/cli/cli.py new file mode 100644 index 0000000..21966a2 --- /dev/null +++ b/exasol_script_languages_container_ci_setup/cli/cli.py @@ -0,0 +1,6 @@ +import click + + +@click.group() +def cli(): + pass diff --git a/exasol_script_languages_container_ci_setup/cli/commands/__init__.py b/exasol_script_languages_container_ci_setup/cli/commands/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/exasol_script_languages_container_ci_setup/cli/commands/deploy_ci_build.py b/exasol_script_languages_container_ci_setup/cli/commands/deploy_ci_build.py new file mode 100644 index 0000000..15b21ab --- /dev/null +++ 
b/exasol_script_languages_container_ci_setup/cli/commands/deploy_ci_build.py @@ -0,0 +1,32 @@ +import logging +import sys + +import click + +from exasol_script_languages_container_ci_setup.cli.cli import cli +from exasol_script_languages_container_ci_setup.cli.common import add_options +from exasol_script_languages_container_ci_setup.cli.options.logging import logging_options, set_log_level +from exasol_script_languages_container_ci_setup.lib.aws_access import AwsAccess +from exasol_script_languages_container_ci_setup.lib.ci_build import run_deploy_ci_build +from exasol_script_languages_container_ci_setup.cli.options.aws_options import aws_options + + +@cli.command() +@add_options(aws_options) +@add_options(logging_options) +@click.option('--project', type=str, required=True, + help="""The project for which the stack will be created.""") +@click.option('--project-url', type=str, required=True, + help="""The URL of the project on Github.""") +def deploy_ci_build( + aws_profile: str, + log_level: str, + project: str, + project_url: str): + set_log_level(log_level) + try: + run_deploy_ci_build(AwsAccess(aws_profile), project, + project_url) + except Exception: + logging.error("run_deploy_ci_build failed.") + sys.exit(1) diff --git a/exasol_script_languages_container_ci_setup/cli/commands/deploy_source_credentials.py b/exasol_script_languages_container_ci_setup/cli/commands/deploy_source_credentials.py new file mode 100644 index 0000000..ea57502 --- /dev/null +++ b/exasol_script_languages_container_ci_setup/cli/commands/deploy_source_credentials.py @@ -0,0 +1,35 @@ +import logging +import sys + +import click + +from exasol_script_languages_container_ci_setup.cli.cli import cli +from exasol_script_languages_container_ci_setup.cli.common import add_options +from exasol_script_languages_container_ci_setup.cli.options.logging import logging_options, set_log_level +from exasol_script_languages_container_ci_setup.lib.aws_access import AwsAccess +from 
exasol_script_languages_container_ci_setup.lib.source_credentials import run_deploy_source_credentials +from exasol_script_languages_container_ci_setup.cli.options.aws_options import aws_options + + +@cli.command() +@add_options(aws_options) +@add_options(logging_options) +@click.option('--secret-name', required=True, type=str, + help="Secret stored in AWS Secret Manager.") +@click.option('--secret-user-key', required=True, type=str, + help="User key stored as secret in AWS Secret Manager.") +@click.option('--secret-token-key', required=True, type=str, + help="Token key stored as secret in AWS Secret Manager.") +def deploy_source_credentials( + aws_profile: str, + log_level: str, + secret_name: str, + secret_user_key: str, + secret_token_key: str): + set_log_level(log_level) + try: + run_deploy_source_credentials(AwsAccess(aws_profile), secret_name, secret_user_key, secret_token_key) + except Exception: + logging.error("deploy_source_credentials failed.") + sys.exit(1) + diff --git a/exasol_script_languages_container_ci_setup/cli/commands/generate_buildspec.py b/exasol_script_languages_container_ci_setup/cli/commands/generate_buildspec.py new file mode 100644 index 0000000..e22e927 --- /dev/null +++ b/exasol_script_languages_container_ci_setup/cli/commands/generate_buildspec.py @@ -0,0 +1,33 @@ +from pathlib import Path +from typing import Tuple, Optional + +import click + +from exasol_script_languages_container_ci_setup.cli.cli import cli +from exasol_script_languages_container_ci_setup.cli.common import add_options +from exasol_script_languages_container_ci_setup.cli.options.logging import logging_options, set_log_level +from exasol_script_languages_container_ci_setup.lib.run_generate_buildspec import run_generate_buildspec + + +@cli.command() +@add_options(logging_options) +@click.option('--flavor-root-path', required=True, multiple=True, + type=click.Path(file_okay=False, dir_okay=True, exists=True), + help="Path where script language container flavors are 
located.") +@click.option('--output-path', type=click.Path(file_okay=False, dir_okay=True, exists=True, writable=True), + default="./aws-code-build/ci", show_default=True, + help="Path where buildspec files will be deployed.") +@click.option('--config-file', type=click.Path(file_okay=True, dir_okay=False, exists=True), + help="Configuration file for build (project specific).") +def generate_buildspecs( + flavor_root_path: Tuple[str, ...], + log_level: str, + output_path: str, + config_file: Optional[str] + ): + """ + This command generates the buildspec file(s) for AWS CodeBuild based on the flavors located in path "flavor_root_path". + """ + set_log_level(log_level) + run_generate_buildspec(flavor_root_path, output_path, config_file) + diff --git a/exasol_script_languages_container_ci_setup/cli/commands/health.py b/exasol_script_languages_container_ci_setup/cli/commands/health.py new file mode 100644 index 0000000..51f0f82 --- /dev/null +++ b/exasol_script_languages_container_ci_setup/cli/commands/health.py @@ -0,0 +1,44 @@ +import sys +from inspect import cleandoc + +from exasol_script_languages_container_ci_setup.cli.cli import cli +from exasol_script_languages_container_ci_setup.cli.common import add_options +from exasol_script_languages_container_ci_setup.cli.options.aws_options import aws_options +from exasol_script_languages_container_ci_setup.health_check import ( + health_checkup, +) + + +@cli.command() +@add_options(aws_options) +def health(aws_profile: str): + """ + Check the health of the execution environment. + + If no issues have been found, using the library or executing the test should work just fine. + For all found issues there will be a proposed fix/solution. + + If the environment was found to be healthy the exit code will be 0. + """ + success, failure = 0, -1 + + problems = set(health_checkup(aws_profile=aws_profile)) + if not problems: + sys.exit(success) + + message = cleandoc( + """ + {count} problem(s) have been identified. 
+ + {problems} + """ + ).format( + count=len(problems), + problems="\n".join( + ( + str(p) for p in problems + ) + ), + ) + print(message) + sys.exit(failure) diff --git a/exasol_script_languages_container_ci_setup/cli/commands/validate_ci_build.py b/exasol_script_languages_container_ci_setup/cli/commands/validate_ci_build.py new file mode 100644 index 0000000..c066685 --- /dev/null +++ b/exasol_script_languages_container_ci_setup/cli/commands/validate_ci_build.py @@ -0,0 +1,26 @@ +import logging + +import click + +from exasol_script_languages_container_ci_setup.cli.cli import cli +from exasol_script_languages_container_ci_setup.cli.common import add_options +from exasol_script_languages_container_ci_setup.cli.options.logging import logging_options, set_log_level +from exasol_script_languages_container_ci_setup.lib.aws_access import AwsAccess +from exasol_script_languages_container_ci_setup.lib.ci_build import run_deploy_ci_build, run_validate_ci_build +from exasol_script_languages_container_ci_setup.cli.options.aws_options import aws_options + + +@cli.command() +@add_options(aws_options) +@add_options(logging_options) +@click.option('--project', type=str, required=True, + help="""The project for which the stack will be created.""") +@click.option('--project-url', type=str, required=True, + help="""The URL of the project on Github.""") +def validate_ci_build( + aws_profile: str, + log_level: str, + project: str, + project_url: str): + set_log_level(log_level) + run_validate_ci_build(AwsAccess(aws_profile), project, project_url) diff --git a/exasol_script_languages_container_ci_setup/cli/commands/validate_source_credentials.py b/exasol_script_languages_container_ci_setup/cli/commands/validate_source_credentials.py new file mode 100644 index 0000000..281b5ed --- /dev/null +++ b/exasol_script_languages_container_ci_setup/cli/commands/validate_source_credentials.py @@ -0,0 +1,27 @@ +import click + +from exasol_script_languages_container_ci_setup.cli.cli import cli 
+from exasol_script_languages_container_ci_setup.cli.common import add_options +from exasol_script_languages_container_ci_setup.cli.options.logging import logging_options, set_log_level +from exasol_script_languages_container_ci_setup.lib.aws_access import AwsAccess +from exasol_script_languages_container_ci_setup.lib.source_credentials import run_validate_source_credentials +from exasol_script_languages_container_ci_setup.cli.options.aws_options import aws_options + + +@cli.command() +@add_options(aws_options) +@add_options(logging_options) +@click.option('--secret-name', required=True, type=str, + help="Secret name for the Github user credentials stored in AWS Secret Manager.") +@click.option('--secret-user-key', required=True, type=str, + help="Github user key stored as secret in AWS Secret Manager under the respective secret name.") +@click.option('--secret-token-key', required=True, type=str, + help="Github user token key stored as secret in AWS Secret Manager under the respective secret name.") +def validate_source_credentials( + aws_profile: str, + log_level: str, + secret_name: str, + secret_user_key: str, + secret_token_key: str): + set_log_level(log_level) + run_validate_source_credentials(AwsAccess(aws_profile), secret_name, secret_user_key, secret_token_key) diff --git a/exasol_script_languages_container_ci_setup/cli/common.py b/exasol_script_languages_container_ci_setup/cli/common.py new file mode 100644 index 0000000..28287d1 --- /dev/null +++ b/exasol_script_languages_container_ci_setup/cli/common.py @@ -0,0 +1,8 @@ + +def add_options(options): + def _add_options(func): + for option in reversed(options): + func = option(func) + return func + + return _add_options diff --git a/exasol_script_languages_container_ci_setup/cli/options/__init__.py b/exasol_script_languages_container_ci_setup/cli/options/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/exasol_script_languages_container_ci_setup/cli/options/aws_options.py 
b/exasol_script_languages_container_ci_setup/cli/options/aws_options.py new file mode 100644 index 0000000..ad14367 --- /dev/null +++ b/exasol_script_languages_container_ci_setup/cli/options/aws_options.py @@ -0,0 +1,6 @@ +import click + +aws_options = [ + click.option('--aws-profile', required=True, type=str, + help="Id of the AWS profile to use."), +] diff --git a/exasol_script_languages_container_ci_setup/cli/options/logging.py b/exasol_script_languages_container_ci_setup/cli/options/logging.py new file mode 100644 index 0000000..407e6cb --- /dev/null +++ b/exasol_script_languages_container_ci_setup/cli/options/logging.py @@ -0,0 +1,20 @@ +import logging + +import click + +SUPPORTED_LOG_LEVELS = {"normal": logging.WARNING, "info": logging.INFO, "debug": logging.DEBUG} + +logging_options = [ + click.option('--log-level', type=click.Choice(list(SUPPORTED_LOG_LEVELS.keys())), default="normal", + show_default=True, + help="Level of information printed out. " + "'Normal' prints only necessary information. " + "'Info' prints also internal status info. 'Debug' prints detailed information."), +] + + +def set_log_level(level: str): + try: + logging.basicConfig(level=SUPPORTED_LOG_LEVELS[level]) + except KeyError as ex: + raise ValueError(f"log level {level} is not supported!") from ex diff --git a/exasol_script_languages_container_ci_setup/health_check.py b/exasol_script_languages_container_ci_setup/health_check.py new file mode 100644 index 0000000..f3d0740 --- /dev/null +++ b/exasol_script_languages_container_ci_setup/health_check.py @@ -0,0 +1,79 @@ +""" +The health check module provides functionality to check the health of the `slc-ci-setup` +package and also provide help to find potential fixes. 
+""" +import shlex +import subprocess +import sys +from typing import Iterator, Optional + +from exasol_error_reporting_python.error_message_builder import ErrorMessageBuilder +from exasol_error_reporting_python.exa_error import ExaError + +SUPPORTED_PLATFORMS = ["linux", "darwin"] + + +def check_shell_cmd(cmd: str) -> bool: + """ + Runs a shell command and returns True if exit code was 0, False otherwise + :param cmd: shell command to execute + :return: returns True if exit code was 0, False otherwise + """ + result = subprocess.run(shlex.split(cmd), capture_output=True) + return result.returncode == 0 + + +def is_supported_platform(**kwargs) -> Optional[ErrorMessageBuilder]: + """ + Checks weather or not the current platform is supported. + """ + if sys.platform not in SUPPORTED_PLATFORMS: + return ExaError.message_builder('E-SLCCS-02') \ + .message("The platform you are running on is not supported.") \ + .mitigation("Make sure you are using one of the following platforms: {SUPPORTED_PLATFORMS}.") + + +def aws_cli_available(**kwargs) -> Optional[ErrorMessageBuilder]: + """Checks weather AWS cli is installed""" + command = "aws --help" + if not check_shell_cmd(command): + return ExaError.message_builder('E-SLCCS-03').message("AWS CLI not installed.") \ + .mitigation("Install AWS CLI. 
" + "Goto https://docs.aws.amazon.com/cli/latest/userguide/getting-started-install.html") + + +def aws_profile_valid(aws_profile: str) -> Optional[ErrorMessageBuilder]: + """Checks weather the given AWS profile is configured properly.""" + command = f"aws --profile {aws_profile} configure list" + if not check_shell_cmd(command): + return ExaError.message_builder('E-SLCCS-04').message("AWS Profile invalid.") \ + .mitigation("Run 'aws configure --profile $your_profile' or " + "'aws configure' to configure the default profile.") + + +def aws_access_key_valid(aws_profile: str) -> Optional[ErrorMessageBuilder]: + """Checks weather AWS access key is configured for the given AWS profile.""" + command = f"aws --profile {aws_profile} iam list-access-keys" + if not check_shell_cmd(command): + return ExaError.message_builder('E-SLCCS-05').message("AWS Access Key invalid.") \ + .mitigation("Go to the AWS console and create an access key for your user. " + "Then register the access key with 'aws configure --profile $your_profile' or " + "'aws configure' for the default profile.") + + +def health_checkup(**kwargs) -> Iterator[ErrorMessageBuilder]: + """ + Runs all known examinations + + return an iterator of error codes specifying which problems have been identified. 
+ """ + examinations = [ + is_supported_platform, + aws_cli_available, + aws_profile_valid, + aws_access_key_valid, + ] + for examination in examinations: + res = examination(**kwargs) + if res is not None: + yield res diff --git a/exasol_script_languages_container_ci_setup/lib/__init__.py b/exasol_script_languages_container_ci_setup/lib/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/exasol_script_languages_container_ci_setup/lib/aws_access.py b/exasol_script_languages_container_ci_setup/lib/aws_access.py new file mode 100644 index 0000000..71a14fb --- /dev/null +++ b/exasol_script_languages_container_ci_setup/lib/aws_access.py @@ -0,0 +1,80 @@ +import logging +from typing import Optional + +import boto3 +from botocore.exceptions import ClientError + +from exasol_script_languages_container_ci_setup.lib.deployer import Deployer + + +class AwsAccess(object): + def __init__(self, aws_profile: Optional[str]): + self._aws_profile = aws_profile + + @property + def aws_profile(self) -> Optional[str]: + return self._aws_profile + + def upload_cloudformation_stack(self, yml: str, stack_name: str): + """ + Deploy the cloudformation stack. 
+ """ + if self._aws_profile is not None: + logging.debug(f"Running upload_cloudformation_stack for aws profile {self._aws_profile}") + aws_session = boto3.session.Session(profile_name=self._aws_profile) + cloud_client = aws_session.client('cloudformation') + else: + logging.debug(f"Running upload_cloudformation_stack for default aws profile.") + cloud_client = boto3.client('cloudformation') + try: + cfn_deployer = Deployer(cloudformation_client=cloud_client) + result = cfn_deployer.create_and_wait_for_changeset(stack_name=stack_name, cfn_template=yml, + parameter_values=[], + capabilities=("CAPABILITY_IAM",), role_arn=None, + notification_arns=None, tags=tuple()) + except Exception as e: + logging.error(f"Error creating changeset for cloud formation template: {e}") + raise e + try: + cfn_deployer.execute_changeset(changeset_id=result.changeset_id, stack_name=stack_name) + cfn_deployer.wait_for_execute(stack_name=stack_name, changeset_type=result.changeset_type) + except Exception as e: + logging.error(f"Error executing changeset for cloud formation template: {e}") + logging.error(f"Run 'aws cloudformation describe-stack-events --stack-name {stack_name}' to get details.") + raise e + + def read_secret_arn(self, secret_name: str): + """" + Uses Boto3 to retrieve the ARN of a secret. 
+ """ + logging.debug(f"Reading secret for getting ARN, secret name = {secret_name}, " + f"for aws profile {self._aws_profile}") + session = boto3.session.Session(profile_name=self._aws_profile) + client = session.client(service_name='secretsmanager') + + try: + get_secret_value_response = client.get_secret_value(SecretId=secret_name) + return get_secret_value_response["ARN"] + except ClientError as e: + logging.error("Unable to read secret") + raise e + + def read_dockerhub_secret_arn(self): + return self.read_secret_arn("Dockerhub") + + def validate_cloudformation_template(self, cloudformation_yml) -> None: + """ + This function pushes the YAML to AWS Cloudformation for validation + (see https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/using-cfn-validate-template.html) + Pitfall: Boto3 expects the YAML string as parameter, whereas the AWS CLI expects the file URL as parameter. + It requires to have the AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY env variables set correctly. 
+ """ + if self._aws_profile is not None: + logging.debug(f"Running validate_cloudformation_template for aws profile {self._aws_profile}") + aws_session = boto3.session.Session(profile_name=self._aws_profile) + cloud_client = aws_session.client('cloudformation') + cloud_client.validate_template(TemplateBody=cloudformation_yml) + else: + logging.debug(f"Running validate_cloudformation_template for default aws profile.") + cloud_client = boto3.client('cloudformation') + cloud_client.validate_template(TemplateBody=cloudformation_yml) diff --git a/exasol_script_languages_container_ci_setup/lib/ci_build.py b/exasol_script_languages_container_ci_setup/lib/ci_build.py new file mode 100644 index 0000000..08783c0 --- /dev/null +++ b/exasol_script_languages_container_ci_setup/lib/ci_build.py @@ -0,0 +1,35 @@ +import logging + +from exasol_script_languages_container_ci_setup.lib.aws_access import AwsAccess +from exasol_script_languages_container_ci_setup.lib.render_template import render_template + +CODE_BUILD_STACK_NAME = "CIBuild" + + +def stack_name(project: str): + return f"{project}{CODE_BUILD_STACK_NAME}" + + +def run_deploy_ci_build(aws_access: AwsAccess, project: str, github_url: str): + """ + This command deploys the ci build cloudformation stack + 1. It get's the dockerhub secret ARN from AWS via Boto3 + 2. Then it renders the template and uploads the resulting cloudformation YAML file. 
+ """ + logging.info(f"run_deploy_ci_build for aws profile {aws_access.aws_profile} for project {project} at {github_url}") + dockerhub_secret_arn = aws_access.read_dockerhub_secret_arn() + yml = render_template("slc_code_build.yaml", project=project, + dockerhub_secret_arn=dockerhub_secret_arn, github_url=github_url) + aws_access.upload_cloudformation_stack(yml, stack_name(project)) + + +def run_validate_ci_build(aws_access: AwsAccess, project: str, github_url: str): + """ + This command validates the ci build cloudformation stack + """ + logging.info(f"run_validate_ci_build for aws profile {aws_access.aws_profile} " + f"for project {project} at {github_url}") + dockerhub_secret_arn = "dummy_arn" + yml = render_template("slc_code_build.yaml", project=project, + dockerhub_secret_arn=dockerhub_secret_arn, github_url=github_url) + aws_access.validate_cloudformation_template(yml) diff --git a/exasol_script_languages_container_ci_setup/lib/deployer.py b/exasol_script_languages_container_ci_setup/lib/deployer.py new file mode 100644 index 0000000..6c8c494 --- /dev/null +++ b/exasol_script_languages_container_ci_setup/lib/deployer.py @@ -0,0 +1,212 @@ +# Copyright 2012-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. 
+ +#Source: https://github.com/aws/aws-cli/blob/e1f7196ad7859a8144f0313fa4b407da5ae8b101/awscli/customizations/cloudformation/deployer.py + +import time +import logging +import botocore +import collections + +from datetime import datetime + +from botocore.exceptions import ValidationError + +LOG = logging.getLogger(__name__) + +ChangeSetResult = collections.namedtuple( + "ChangeSetResult", ["changeset_id", "changeset_type"]) + + +class Deployer(object): + + def __init__(self, cloudformation_client, + changeset_prefix="slc-ci-setup-deploy-"): + self._client = cloudformation_client + self.changeset_prefix = changeset_prefix + + def has_stack(self, stack_name): + """ + Checks if a CloudFormation stack with given name exists + + :param stack_name: Name or ID of the stack + :return: True if stack exists. False otherwise + """ + try: + resp = self._client.describe_stacks(StackName=stack_name) + if len(resp["Stacks"]) != 1: + return False + + # When you run CreateChangeSet on a a stack that does not exist, + # CloudFormation will create a stack and set it's status + # REVIEW_IN_PROGRESS. However this stack is cannot be manipulated + # by "update" commands. Under this circumstances, we treat like + # this stack does not exist and call CreateChangeSet will + # ChangeSetType set to CREATE and not UPDATE. + stack = resp["Stacks"][0] + return stack["StackStatus"] != "REVIEW_IN_PROGRESS" + + except botocore.exceptions.ClientError as e: + # If a stack does not exist, describe_stacks will throw an + # exception. Unfortunately we don't have a better way than parsing + # the exception msg to understand the nature of this exception. + msg = str(e) + + if "Stack with id {0} does not exist".format(stack_name) in msg: + LOG.debug("Stack with id {0} does not exist".format( + stack_name)) + return False + else: + # We don't know anything about this exception. 
Don't handle + LOG.debug("Unable to get stack details.", exc_info=e) + raise e + + def create_changeset(self, stack_name, cfn_template, + parameter_values, capabilities, role_arn, + notification_arns, tags): + """ + Call Cloudformation to create a changeset and wait for it to complete + + :param stack_name: Name or ID of stack + :param cfn_template: CloudFormation template string + :param parameter_values: Template parameters object + :param capabilities: Array of capabilities passed to CloudFormation + :param tags: Array of tags passed to CloudFormation + :return: + """ + + now = datetime.utcnow().isoformat() + description = "Created by AWS CLI at {0} UTC".format(now) + + # Each changeset will get a unique name based on time + changeset_name = self.changeset_prefix + str(int(time.time())) + + if not self.has_stack(stack_name): + changeset_type = "CREATE" + # When creating a new stack, UsePreviousValue=True is invalid. + # For such parameters, users should either override with new value, + # or set a Default value in template to successfully create a stack. 
+ parameter_values = [x for x in parameter_values + if not x.get("UsePreviousValue", False)] + else: + changeset_type = "UPDATE" + # UsePreviousValue not valid if parameter is new + summary = self._client.get_template_summary(StackName=stack_name) + existing_parameters = [parameter['ParameterKey'] for parameter in + summary['Parameters']] + parameter_values = [x for x in parameter_values + if not (x.get("UsePreviousValue", False) and + x["ParameterKey"] not in existing_parameters)] + + kwargs = { + 'ChangeSetName': changeset_name, + 'StackName': stack_name, + 'TemplateBody': cfn_template, + 'ChangeSetType': changeset_type, + 'Parameters': parameter_values, + 'Capabilities': capabilities, + 'Description': description, + 'Tags': tags, + } + + # don't set these arguments if not specified to use existing values + if role_arn is not None: + kwargs['RoleARN'] = role_arn + if notification_arns is not None: + kwargs['NotificationARNs'] = notification_arns + try: + resp = self._client.create_change_set(**kwargs) + return ChangeSetResult(resp["Id"], changeset_type) + except Exception as ex: + LOG.debug("Unable to create changeset", exc_info=ex) + raise ex + + def wait_for_changeset(self, changeset_id, stack_name): + """ + Waits until the changeset creation completes + + :param changeset_id: ID or name of the changeset + :param stack_name: Stack name + :return: Latest status of the create-change-set operation + """ + LOG.info("Waiting for changeset to be created..") + + # Wait for changeset to be created + waiter = self._client.get_waiter("change_set_create_complete") + # Poll every 5 seconds. 
Changeset creation should be fast + waiter_config = {'Delay': 5} + try: + waiter.wait(ChangeSetName=changeset_id, StackName=stack_name, + WaiterConfig=waiter_config) + except botocore.exceptions.WaiterError as ex: + LOG.debug("Create changeset waiter exception", exc_info=ex) + + resp = ex.last_response + status = resp["Status"] + reason = resp["StatusReason"] + + raise RuntimeError("Failed to create the changeset: {0} " + "Status: {1}. Reason: {2}" + .format(ex, status, reason)) from ex + + def execute_changeset(self, changeset_id, stack_name, + disable_rollback=False): + """ + Calls CloudFormation to execute changeset + + :param changeset_id: ID of the changeset + :param stack_name: Name or ID of the stack + :param disable_rollback: Disable rollback of all resource changes + :return: Response from execute-change-set call + """ + return self._client.execute_change_set( + ChangeSetName=changeset_id, + StackName=stack_name, + DisableRollback=disable_rollback) + + def wait_for_execute(self, stack_name, changeset_type): + + LOG.info("Waiting for stack create/update to complete\n") + + # Pick the right waiter + if changeset_type == "CREATE": + waiter = self._client.get_waiter("stack_create_complete") + elif changeset_type == "UPDATE": + waiter = self._client.get_waiter("stack_update_complete") + else: + raise RuntimeError("Invalid changeset type {0}" + .format(changeset_type)) + + # Poll every 30 seconds. 
Polling too frequently risks hitting rate limits + # on CloudFormation's DescribeStacks API + waiter_config = { + 'Delay': 30, + 'MaxAttempts': 120, + } + + try: + waiter.wait(StackName=stack_name, WaiterConfig=waiter_config) + except botocore.exceptions.WaiterError as ex: + LOG.debug("Execute changeset waiter exception", exc_info=ex) + raise RuntimeError("Execute changeset waiter exception", ex) + + def create_and_wait_for_changeset(self, stack_name, cfn_template, + parameter_values, capabilities, role_arn, + notification_arns, tags): + + result = self.create_changeset( + stack_name, cfn_template, parameter_values, capabilities, + role_arn, notification_arns, tags) + self.wait_for_changeset(result.changeset_id, stack_name) + + return result diff --git a/exasol_script_languages_container_ci_setup/lib/render_template.py b/exasol_script_languages_container_ci_setup/lib/render_template.py new file mode 100644 index 0000000..18819f2 --- /dev/null +++ b/exasol_script_languages_container_ci_setup/lib/render_template.py @@ -0,0 +1,8 @@ +import jinja2 + + +def render_template(template: str, **kwargs): + env = jinja2.Environment(loader=jinja2.PackageLoader("exasol_script_languages_container_ci_setup"), + autoescape=jinja2.select_autoescape()) + t = env.get_template(template) + return t.render(**kwargs) diff --git a/exasol_script_languages_container_ci_setup/lib/run_generate_buildspec.py b/exasol_script_languages_container_ci_setup/lib/run_generate_buildspec.py new file mode 100644 index 0000000..d81abde --- /dev/null +++ b/exasol_script_languages_container_ci_setup/lib/run_generate_buildspec.py @@ -0,0 +1,81 @@ +import json +import logging +from dataclasses import dataclass +from pathlib import Path +from typing import Tuple, Optional +import jsonschema + +from exasol_script_languages_container_ci_setup.lib.render_template import render_template + + +@dataclass(eq=True, frozen=True) +class Flavor(object): + """" + Holds the name and the formatted name used for generating 
the buildspec. + """ + flavor_original: str + + @property + def flavor_formatted(self) -> str: + return self.flavor_original.replace(".", "").replace("-", "_") + + +def validate_config_file(config_file: Optional[str]): + """ + Validates config file, path given by parameter config_file. + :raises: + + `jsonschema.exceptions.ValidationError` if the config file has invalid JSON format. + `jsonschema.exceptions.SchemaError` if the config file is not in accordance with the the schema. + `ValueError` if the ignored path given in the config file does not exist. + """ + if config_file is not None: + with open(config_file, "r") as config_file_: + config = json.load(config_file_) + config_schema = json.loads(render_template("config_schema.json")) + jsonschema.validate(instance=config, schema=config_schema) + ignored_paths = config["build_ignore"]["ignored_paths"] + for ignored_path in ignored_paths: + folder_path = Path(ignored_path) + if not folder_path.exists(): + raise ValueError(f"Ignored folder '{ignored_path}' does not exist.") + + +def get_config_file_parameter(config_file: Optional[str]): + if config_file is None: + return "" + return f"--config-file {config_file}" + + +def run_generate_buildspec( + flavor_root_paths: Tuple[str, ...], + output_pathname: str, + config_file: Optional[str]): + validate_config_file(config_file) + flavors = set() + logging.info(f"Run run_generate_buildspec for paths: {flavor_root_paths}") + for flavor_root_path in [Path(f).resolve() for f in flavor_root_paths]: + assert flavor_root_path.is_dir() + assert flavor_root_path.exists() + assert flavor_root_path.name == "flavors" + dirs = (d for d in flavor_root_path.iterdir() if d.is_dir()) + flavors.update(map(lambda directory: Flavor(directory.name), dirs)) + logging.info(f"Found flavors: {flavors}") + buildspec_body = [] + for flavor in flavors: + buildspec_body.append(render_template("buildspec_batch_entry.yaml", + flavor_original=flavor.flavor_original, + 
flavor_formatted=flavor.flavor_formatted, + out_path=output_pathname)) + + result_yaml = render_template("buildspec_hull.yaml", batch_entries="\n".join(buildspec_body)) + + output_pathname = Path(output_pathname) + with open(output_pathname / "buildspec.yaml", "w") as output_file: + output_file.write(result_yaml) + + result_build_yaml = render_template("build_buildspec.yaml", + config_file_parameter=get_config_file_parameter(config_file)) + + with open(output_pathname / "build_buildspec.yaml", "w") as output_file: + output_file.write(result_build_yaml) diff --git a/exasol_script_languages_container_ci_setup/lib/source_credentials.py b/exasol_script_languages_container_ci_setup/lib/source_credentials.py new file mode 100644 index 0000000..9503a8c --- /dev/null +++ b/exasol_script_languages_container_ci_setup/lib/source_credentials.py @@ -0,0 +1,37 @@ +import logging + +from exasol_script_languages_container_ci_setup.lib.aws_access import AwsAccess +from exasol_script_languages_container_ci_setup.lib.render_template import render_template + +SOURCE_CREDENTIALS_STACK_NAME = "SLCSourceCredentials" + + +def run_deploy_source_credentials( + aws_access: AwsAccess, + secret_name: str, + secret_user_key: str, + secret_token_key: str): + """ + This command deploys the source credential as cloudformation stack. + """ + logging.info(f"run_deploy_source_credentials for aws profile {aws_access.aws_profile}") + yml = render_template("slc_source_credential.yaml", + secret_name=secret_name, + secret_user_key=secret_user_key, + secret_token_key=secret_token_key) + aws_access.upload_cloudformation_stack(yml, SOURCE_CREDENTIALS_STACK_NAME) + + +def run_validate_source_credentials(aws_access: AwsAccess, + secret_name: str, + secret_user_key: str, + secret_token_key: str): + """ + This command deploys the source credential as cloudformation stack. 
+ """ + logging.info(f"run_validate_source_credentials for aws profile {aws_access.aws_profile}") + yml = render_template("slc_source_credential.yaml", + secret_name=secret_name, + secret_user_key=secret_user_key, + secret_token_key=secret_token_key) + aws_access.validate_cloudformation_template(yml) diff --git a/exasol_script_languages_container_ci_setup/main.py b/exasol_script_languages_container_ci_setup/main.py new file mode 100755 index 0000000..7b8cb72 --- /dev/null +++ b/exasol_script_languages_container_ci_setup/main.py @@ -0,0 +1,6 @@ +#! /usr/bin/env python3 +# +from exasol_script_languages_container_ci_setup.cli.cli import cli + +if __name__ == '__main__': + cli() diff --git a/exasol_script_languages_container_ci_setup/templates/build_buildspec.yaml b/exasol_script_languages_container_ci_setup/templates/build_buildspec.yaml new file mode 100644 index 0000000..b07882c --- /dev/null +++ b/exasol_script_languages_container_ci_setup/templates/build_buildspec.yaml @@ -0,0 +1,41 @@ +version: 0.2 + +# ---- AUTOMATICALLY GENERATED FILE -------- +# ---- DO NOT EDIT MANUALLY, BUT USE PYTHON MODULE "script-languages-container-ci-setup" TO UPDATE --- +env: + shell: bash + variables: + BUILD_DOCKER_REPOSITORY: "exadockerci4/script-languages-build-cache" + RELEASE_DOCKER_REPOSITORY: "exasol/script-language-container" + secrets-manager: + DOCKER_USER: "Dockerhub:User" + DOCKER_PASSWORD: "Dockerhub:AccessToken" +phases: + + install: + runtime-versions: + python: 3.8 + commands: + - git submodule update --init --recursive + - curl -sSL https://raw.githubusercontent.com/python-poetry/poetry/master/get-poetry.py | python3 - + - export PATH=$PATH:$HOME/.poetry/bin + - poetry env use $(command -v "python3.8") + - poetry install + + pre_build: + commands: + - echo CODEBUILD_SOURCE_VERSION is $CODEBUILD_SOURCE_VERSION #supposed to be the SHA + - echo CODEBUILD_WEBHOOK_HEAD_REF is $CODEBUILD_WEBHOOK_HEAD_REF #supposed to the branch name + - echo "$DOCKER_PASSWORD" | docker 
login --username "$DOCKER_USER" --password-stdin + build: + commands: + - poetry run python3 -m exasol_script_languages_container_ci.main run-ci --flavor $FLAVOR --branch-name "$CODEBUILD_WEBHOOK_HEAD_REF" --docker-user "$DOCKER_USER" --docker-password "$DOCKER_PASSWORD" --docker-build-repository "$BUILD_DOCKER_REPOSITORY" --docker-release-repository "$RELEASE_DOCKER_REPOSITORY" --commit-sha "$CODEBUILD_SOURCE_VERSION" {{ config_file_parameter }} + +artifacts: + files: + - 'jobs/**/*' + - 'security_scan/**/*' + name: build_output_$(date +%Y-%m-%d-%Hh-%Mm-%Ss) + base-directory: .build_output + s3-prefix: flavor_$FLAVOR + diff --git a/exasol_script_languages_container_ci_setup/templates/buildspec_batch_entry.yaml b/exasol_script_languages_container_ci_setup/templates/buildspec_batch_entry.yaml new file mode 100644 index 0000000..ab17f60 --- /dev/null +++ b/exasol_script_languages_container_ci_setup/templates/buildspec_batch_entry.yaml @@ -0,0 +1,7 @@ + - identifier: build_{{flavor_formatted}} + env: + variables: + FLAVOR: {{flavor_original}} + buildspec: {{out_path}}/build_buildspec.yaml + privileged-mode: true + type: BUILD_GENERAL1_MEDIUM diff --git a/exasol_script_languages_container_ci_setup/templates/buildspec_hull.yaml b/exasol_script_languages_container_ci_setup/templates/buildspec_hull.yaml new file mode 100644 index 0000000..f7e1f14 --- /dev/null +++ b/exasol_script_languages_container_ci_setup/templates/buildspec_hull.yaml @@ -0,0 +1,9 @@ +version: 0.2 + +# ---- AUTOMATICALLY GENERATED FILE -------- +# ---- DO NOT EDIT MANUALLY, BUT USE PYTHON MODULE "script-languages-container-ci-setup" TO UPDATE --- + +batch: + fast-fail: false + build-graph: +{{batch_entries}} diff --git a/exasol_script_languages_container_ci_setup/templates/config_schema.json b/exasol_script_languages_container_ci_setup/templates/config_schema.json new file mode 100644 index 0000000..019027d --- /dev/null +++ b/exasol_script_languages_container_ci_setup/templates/config_schema.json @@ 
-0,0 +1,15 @@ +{ + "type" : "object", + "required": [ "build_ignore" ], + "additionalProperties" : false, + "properties" : { + "build_ignore": { + "type" : "object", + "additionalProperties" : false, + "required": [ "ignored_paths" ], + "properties" : { + "ignored_paths" : { "type" : "array"} + } + } + } +} diff --git a/exasol_script_languages_container_ci_setup/templates/slc_code_build.yaml b/exasol_script_languages_container_ci_setup/templates/slc_code_build.yaml new file mode 100644 index 0000000..944cdd1 --- /dev/null +++ b/exasol_script_languages_container_ci_setup/templates/slc_code_build.yaml @@ -0,0 +1,130 @@ +Resources: + ArtifactsBucket: + Type: AWS::S3::Bucket + Properties: + PublicAccessBlockConfiguration: + BlockPublicAcls: true + BlockPublicPolicy: true + IgnorePublicAcls: true + RestrictPublicBuckets: true + BucketEncryption: + ServerSideEncryptionConfiguration: + - ServerSideEncryptionByDefault: + SSEAlgorithm: 'aws:kms' + KMSMasterKeyID: !Sub "arn:aws:kms:${AWS::Region}:${AWS::AccountId}:alias/aws/s3" + BucketKeyEnabled: true + LifecycleConfiguration: + Rules: + - Id: ExpirationRule + Status: Enabled + ExpirationInDays: 30 + + CodeBuildRole: + Type: AWS::IAM::Role + Properties: + AssumeRolePolicyDocument: + Version: 2012-10-17 + Statement: + - Effect: Allow + Principal: + Service: + - codebuild.amazonaws.com + Action: + - sts:AssumeRole + Description: !Sub "IAM Role for ${AWS::StackName}" + Path: '/' + Policies: + - PolicyName: root + PolicyDocument: + Version: '2012-10-17' + Statement: + - Effect: Allow + Action: + - codebuild:* + - logs:* + Resource: '*' + - Effect: Allow + Action: + - secretsmanager:GetSecretValue + Resource: + #- arn:aws:secretsmanager:eu-central-1:620087982706:secret:Dockerhub-pFnwBg + - {{ dockerhub_secret_arn }} + - Effect: Allow + Action: + - s3:PutObject + - s3:GetObject + Resource: + - !Sub "arn:aws:s3:::${ArtifactsBucket}/*" + BatchBuildRole: + Type: AWS::IAM::Role + Properties: + AssumeRolePolicyDocument: + Version: 
2012-10-17 + Statement: + - Effect: Allow + Principal: + Service: + - codebuild.amazonaws.com + Action: + - sts:AssumeRole + Description: !Sub "IAM Role for controlling builds on ${AWS::StackName}" + Path: '/' + Policies: + - PolicyName: root + PolicyDocument: + Version: '2012-10-17' + Statement: + - Effect: Allow + Action: + - codebuild:StartBuild + - codebuild:StopBuild + - codebuild:RetryBuild + Resource: '*' + {{project}}CodeBuild: + Type: AWS::CodeBuild::Project + Properties: + Description: {{ project }} Container CodeBuild + Triggers: + BuildType: BUILD_BATCH + Webhook: True + FilterGroups: + - - Type: EVENT + Pattern: PUSH,PULL_REQUEST_MERGED + - Type: HEAD_REF + Pattern: ^refs/heads/(main|master|rebuild|feature|bug|enhancement|refactoring|ci|security)/.* + ExcludeMatchedPattern: false + ServiceRole: !GetAtt CodeBuildRole.Arn + BuildBatchConfig: + BatchReportMode: REPORT_INDIVIDUAL_BUILDS + ServiceRole: !GetAtt BatchBuildRole.Arn + TimeoutInMins: 240 + Artifacts: + Location: !Ref ArtifactsBucket + OverrideArtifactName: true + Type: S3 + Environment: + Type: LINUX_CONTAINER + PrivilegedMode: true + ComputeType: BUILD_GENERAL1_SMALL + Image: aws/codebuild/standard:5.0 + Source: + Type: GITHUB + Location: {{github_url}} + BuildSpec: "aws-code-build/ci/buildspec.yaml" + TimeoutInMinutes: 240 + LogsConfig: + S3Logs: + Location: !Sub "${ArtifactsBucket}/logs" + Status: ENABLED + +#Trick to have log retention, see https://medium.com/allermedia-techblog/cloudformation-example-log-retention-for-lambda-and-codebuild-a11e74516bb6 + CodeBuildLogGroup: + Type: AWS::Logs::LogGroup + Properties: + LogGroupName: !Sub "/aws/codebuild/${ {{project}}CodeBuild }" + RetentionInDays: 14 + +Outputs: + ProjectName: + Value: !Ref {{project}}CodeBuild + Description: {{ project }} Build project name diff --git a/exasol_script_languages_container_ci_setup/templates/slc_source_credential.yaml b/exasol_script_languages_container_ci_setup/templates/slc_source_credential.yaml new file mode 
100644 index 0000000..5380cbd --- /dev/null +++ b/exasol_script_languages_container_ci_setup/templates/slc_source_credential.yaml @@ -0,0 +1,8 @@ +Resources: + CodeBuildCredentials: + Type: AWS::CodeBuild::SourceCredential + Properties: + ServerType: GITHUB + AuthType: PERSONAL_ACCESS_TOKEN + Username: "{{ '{{' }}resolve:secretsmanager:{{secret_name}}:SecretString:{{secret_user_key}}}}" + Token: "{{ '{{' }}resolve:secretsmanager:{{secret_name}}:SecretString:{{secret_token_key}}}}" diff --git a/githooks/install.sh b/githooks/install.sh new file mode 100755 index 0000000..071ca0e --- /dev/null +++ b/githooks/install.sh @@ -0,0 +1,44 @@ +#!/bin/bash +set -o errexit +set -o nounset +set -o pipefail + +SCRIPT_DIR="$(dirname "$(readlink -f "${BASH_SOURCE[0]}")")" +REPO_DIR=$(git rev-parse --show-toplevel) +REPO_DIR="$(readlink -f "${REPO_DIR}")" +GIT_DIR="$REPO_DIR/.git" +GIT_DIR="$(readlink -f "${GIT_DIR}")" + +if [[ ! -d "$GIT_DIR" ]]; then + echo "$GIT_DIR is not a git directory." >&2 + exit 1 +else + GITHOOKS_PATH="$GIT_DIR/hooks" +fi + +GITHOOKS_PATH="$(readlink -f "${GITHOOKS_PATH}")" + +copy_hook() { + local SCRIPT_PATH="$SCRIPT_DIR/$1" + local GITHOOK_PATH="$GITHOOKS_PATH/$2" + local RELATIVE_PATH="" + RELATIVE_PATH=$(realpath --relative-to="$GITHOOKS_PATH" "$SCRIPT_PATH") + pushd "$GITHOOKS_PATH" > /dev/null + if [ -e "$GITHOOK_PATH" ] || [ -L "$GITHOOK_PATH" ] + then + echo + echo "Going to delete old hook $GITHOOK_PATH" + rm "$GITHOOK_PATH" > /dev/null + fi + echo + echo "Link hook to script" >&2 + echo "Hook-Path: $GITHOOK_PATH" >&2 + echo "Script-path: $SCRIPT_PATH" >&2 + echo + ln -s "$RELATIVE_PATH" "$2" > /dev/null + chmod +x "$SCRIPT_PATH" > /dev/null + popd > /dev/null +} + +copy_hook pre-commit pre-commit +copy_hook pre-push pre-push diff --git a/githooks/pre-commit b/githooks/pre-commit new file mode 100755 index 0000000..f69101e --- /dev/null +++ b/githooks/pre-commit @@ -0,0 +1,11 @@ +#!/bin/bash +set -o errexit +set -o nounset +set -o pipefail + 
+REPO_DIR=$(git rev-parse --show-toplevel) +GITHOOKS_PATH="$REPO_DIR/githooks" +pushd "$REPO_DIR" +bash "$GITHOOKS_PATH/prohibit_commit_to_main.sh" +bash "$GITHOOKS_PATH/update_packaging.sh" +popd diff --git a/githooks/pre-push b/githooks/pre-push new file mode 100755 index 0000000..d7dbac6 --- /dev/null +++ b/githooks/pre-push @@ -0,0 +1,43 @@ +#!/bin/bash +protected_branches=( main ) +for i in "${protected_branches[@]}" +do + + protected_branch=$i + + policy='[Policy] Never push, force push or delete the '$protected_branch' branch! (Prevented with pre-push hook.)' + + current_branch=$(git symbolic-ref HEAD | sed -e 's,.*/\(.*\),\1,') + + push_command=$(ps -ocommand= -p $PPID) + + is_destructive='force|delete|\-f' + + will_remove_protected_branch=':'$protected_branch + + do_exit(){ + echo $policy + exit 1 + } + + if [[ $push_command =~ $is_destructive ]] && [ $current_branch = $protected_branch ]; then + do_exit + fi + + if [[ $push_command =~ $is_destructive ]] && [[ $push_command =~ $protected_branch ]]; then + do_exit + fi + + if [[ $push_command =~ $will_remove_protected_branch ]]; then + do_exit + fi + + if [[ $protected_branch == $current_branch ]]; then + do_exit + fi + +done + +unset do_exit + +exit 0 diff --git a/githooks/prohibit_commit_to_main.sh b/githooks/prohibit_commit_to_main.sh new file mode 100755 index 0000000..016b496 --- /dev/null +++ b/githooks/prohibit_commit_to_main.sh @@ -0,0 +1,8 @@ +#!/bin/sh + +branch="$(git rev-parse --abbrev-ref HEAD)" + +if [ "$branch" = "main" ]; then + echo "You can't commit directly to main branch" + exit 1 +fi diff --git a/githooks/update_packaging.sh b/githooks/update_packaging.sh new file mode 100755 index 0000000..b84d29f --- /dev/null +++ b/githooks/update_packaging.sh @@ -0,0 +1,44 @@ +#!/bin/bash +set -o errexit +set -o nounset +set -o pipefail + +# define colors for use in output +no_color='\033[0m' +grey='\033[0;90m' + +# Jump to the current project's root directory (the one containing +# .git/) 
+ROOT_DIR=$(git rev-parse --show-toplevel || echo) +NO_GIT=FALSE +if [ -z "$ROOT_DIR" ] +then + echo "Did not found git repository, using '$PWD' as ROOT_DIR" + NO_GIT=TRUE + ROOT_DIR=$PWD +fi + +#pushd "$ROOT_DIR" > /dev/null +pushd "$ROOT_DIR" + +echo -e "Generate setup.py ${grey}(pre-commit hook)${no_color}" +if [ -d "dist" ] +then + rm -r "dist" +fi +poetry build > /dev/null +pushd dist > /dev/null +tar_file=$(ls -- *.tar.gz) +extracted_dir=${tar_file%.tar.gz} +tar -xf "$tar_file" +cp "$extracted_dir/setup.py" ../setup.py +rm -r "$extracted_dir" +popd > /dev/null + +if [ "$NO_GIT" == "FALSE" ] +then + echo -e "Add generated files ${grey}(pre-commit hook)${no_color}" + git add setup.py +fi + +popd > /dev/null diff --git a/poetry.lock b/poetry.lock new file mode 100644 index 0000000..41afc21 --- /dev/null +++ b/poetry.lock @@ -0,0 +1,1146 @@ +[[package]] +name = "atomicwrites" +version = "1.4.0" +description = "Atomic file writes." +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[[package]] +name = "attrs" +version = "21.4.0" +description = "Classes Without Boilerplate" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[package.extras] +docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"] +tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "cloudpickle"] +tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "cloudpickle"] +dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit", "cloudpickle"] + +[[package]] +name = "aws-sam-translator" +version = "1.45.0" +description = "AWS SAM Translator is a library that transform SAM templates into AWS 
CloudFormation templates" +category = "dev" +optional = false +python-versions = ">=3.7, <=4.0, !=4.0" + +[package.dependencies] +boto3 = ">=1.5,<2.0" +jsonschema = ">=3.2,<4.0" + +[package.extras] +dev = ["coverage (>=5.3,<6.0)", "flake8 (>=3.8.4,<3.9.0)", "tox (>=3.24,<4.0)", "pytest-cov (>=2.10.1,<2.11.0)", "pytest-xdist (>=2.5,<3.0)", "pytest-env (>=0.6.2,<0.7.0)", "pylint (>=2.9.0,<2.10.0)", "pyyaml (>=5.4,<6.0)", "pytest (>=6.2.5,<6.3.0)", "parameterized (>=0.7.4,<0.8.0)", "click (>=7.1,<8.0)", "dateparser (>=0.7,<1.0)", "boto3 (>=1.17,<2.0)", "requests (>=2.24.0,<2.25.0)", "docopt (>=0.6.2,<0.7.0)", "black (==20.8b1)"] + +[[package]] +name = "boto3" +version = "1.22.2" +description = "The AWS SDK for Python" +category = "main" +optional = false +python-versions = ">= 3.6" + +[package.dependencies] +botocore = ">=1.25.2,<1.26.0" +jmespath = ">=0.7.1,<2.0.0" +s3transfer = ">=0.5.0,<0.6.0" + +[package.extras] +crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] + +[[package]] +name = "botocore" +version = "1.25.2" +description = "Low-level, data-driven core of boto 3." +category = "main" +optional = false +python-versions = ">= 3.6" + +[package.dependencies] +jmespath = ">=0.7.1,<2.0.0" +python-dateutil = ">=2.1,<3.0.0" +urllib3 = ">=1.25.4,<1.27" + +[package.extras] +crt = ["awscrt (==0.13.8)"] + +[[package]] +name = "certifi" +version = "2021.10.8" +description = "Python package for providing Mozilla's CA Bundle." 
+category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "cfn-lint" +version = "0.59.0" +description = "Checks CloudFormation templates for practices and behaviour that could potentially be improved" +category = "dev" +optional = false +python-versions = ">=3.6, <=4.0, !=4.0" + +[package.dependencies] +aws-sam-translator = ">=1.45.0" +jschema-to-python = ">=1.2.3,<1.3.0" +jsonpatch = "*" +jsonschema = ">=3.0,<4.0" +junit-xml = ">=1.9,<2.0" +networkx = ">=2.4,<3.0" +pyyaml = ">5.4" +sarif-om = ">=1.0.4,<1.1.0" + +[[package]] +name = "charset-normalizer" +version = "2.0.12" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +category = "main" +optional = false +python-versions = ">=3.5.0" + +[package.extras] +unicode_backport = ["unicodedata2"] + +[[package]] +name = "click" +version = "8.1.2" +description = "Composable command line interface toolkit" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "colorama" +version = "0.4.4" +description = "Cross-platform colored terminal text." +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[[package]] +name = "docker" +version = "5.0.3" +description = "A Python library for the Docker Engine API." 
+category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +requests = ">=2.14.2,<2.18.0 || >2.18.0" +websocket-client = ">=0.32.0" + +[package.extras] +tls = ["pyOpenSSL (>=17.5.0)", "cryptography (>=3.4.7)", "idna (>=2.0.0)"] +ssh = ["paramiko (>=2.4.2)"] + +[[package]] +name = "docutils" +version = "0.18.1" +description = "Docutils -- Python Documentation Utilities" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[[package]] +name = "exasol-error-reporting-python" +version = "0.2.0" +description = "Exasol Python Error Reporting" +category = "main" +optional = false +python-versions = ">=3.6.1,<4.0" +develop = false + +[package.source] +type = "git" +url = "https://github.com/exasol/error-reporting-python.git" +reference = "main" +resolved_reference = "e21f181502b32f18d09660dde84cc20159c27b29" + +[[package]] +name = "exasol-integration-test-docker-environment" +version = "0.10.0" +description = "Integration Test Docker Environment for Exasol" +category = "main" +optional = false +python-versions = ">=3.6,<4" + +[package.dependencies] +click = ">=7.0" +docker = {version = ">=4.0.0", markers = "sys_platform != \"win32\""} +gitpython = ">=2.1.0" +humanfriendly = ">=4.18" +jinja2 = ">=2.10.1" +jsonpickle = ">=1.1" +luigi = ">=2.8.4" +netaddr = ">=0.7.19" +networkx = ">=2.3" +pydot = ">=1.4.0" +requests = ">=2.21.0" +simplejson = ">=3.16.0" +"stopwatch.py" = ">=1.0.0" + +[package.source] +type = "url" +url = "https://github.com/exasol/integration-test-docker-environment/releases/download/0.10.0/exasol_integration_test_docker_environment-0.10.0-py3-none-any.whl" + +[[package]] +name = "exasol-script-languages-container-tool" +version = "0.12.0" +description = "Script Languages Container Tool" +category = "main" +optional = false +python-versions = ">=3.6,<4" + +[package.dependencies] +exasol-integration-test-docker-environment = {url = 
"https://github.com/exasol/integration-test-docker-environment/releases/download/0.10.0/exasol_integration_test_docker_environment-0.10.0-py3-none-any.whl"} +importlib_metadata = ">=4.6.0" + +[package.source] +type = "url" +url = "https://github.com/exasol/script-languages-container-tool/releases/download/0.12.0/exasol_script_languages_container_tool-0.12.0-py3-none-any.whl" +[[package]] +name = "gitdb" +version = "4.0.9" +description = "Git Object Database" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +smmap = ">=3.0.1,<6" + +[[package]] +name = "gitpython" +version = "3.1.27" +description = "GitPython is a python library used to interact with Git repositories" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +gitdb = ">=4.0.1,<5" + +[[package]] +name = "humanfriendly" +version = "10.0" +description = "Human friendly output for text interfaces using Python" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[package.dependencies] +pyreadline3 = {version = "*", markers = "sys_platform == \"win32\" and python_version >= \"3.8\""} + +[[package]] +name = "idna" +version = "3.3" +description = "Internationalized Domain Names in Applications (IDNA)" +category = "main" +optional = false +python-versions = ">=3.5" + +[[package]] +name = "importlib-metadata" +version = "4.11.3" +description = "Read metadata from Python packages" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +zipp = ">=0.5" + +[package.extras] +docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)"] +testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "packaging", "pyfakefs", "flufl.flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)", "importlib-resources (>=1.3)"] +perf = ["ipython"] + +[[package]] +name = 
"iniconfig" +version = "1.1.1" +description = "iniconfig: brain-dead simple config-ini parsing" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "jinja2" +version = "3.1.1" +description = "A very fast and expressive template engine." +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "jmespath" +version = "1.0.0" +description = "JSON Matching Expressions" +category = "main" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "jschema-to-python" +version = "1.2.3" +description = "Generate source code for Python classes from a JSON schema." +category = "dev" +optional = false +python-versions = ">= 2.7" + +[package.dependencies] +attrs = "*" +jsonpickle = "*" +pbr = "*" + +[[package]] +name = "jsonpatch" +version = "1.32" +description = "Apply JSON-Patches (RFC 6902)" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[package.dependencies] +jsonpointer = ">=1.9" + +[[package]] +name = "jsonpickle" +version = "2.1.0" +description = "Python library for serializing any arbitrary object graph into JSON" +category = "main" +optional = false +python-versions = ">=2.7" + +[package.extras] +docs = ["sphinx", "jaraco.packaging (>=3.2)", "rst.linker (>=1.9)"] +"testing.libs" = ["demjson", "simplejson", "ujson", "yajl"] +testing = ["pytest (>=3.5,!=3.7.3)", "pytest-checkdocs (>=1.2.3)", "pytest-flake8", "pytest-black-multipy", "pytest-cov", "ecdsa", "feedparser", "numpy", "pandas", "pymongo", "scikit-learn", "sqlalchemy", "enum34", "jsonlib"] + +[[package]] +name = "jsonpointer" +version = "2.3" +description = "Identify specific nodes in a JSON document (RFC 6901)" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[[package]] +name = "jsonschema" +version = "3.2.0" +description = "An 
implementation of JSON Schema validation for Python" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +attrs = ">=17.4.0" +pyrsistent = ">=0.14.0" +six = ">=1.11.0" + +[package.extras] +format_nongpl = ["idna", "jsonpointer (>1.13)", "webcolors", "rfc3986-validator (>0.1.0)", "rfc3339-validator"] +format = ["idna", "jsonpointer (>1.13)", "rfc3987", "strict-rfc3339", "webcolors"] + +[[package]] +name = "junit-xml" +version = "1.9" +description = "Creates JUnit XML test result documents that can be read by tools such as Jenkins" +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +six = "*" + +[[package]] +name = "lockfile" +version = "0.12.2" +description = "Platform-independent file locking module" +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "luigi" +version = "3.0.3" +description = "Workflow mgmgt + task scheduling + dependency resolution." +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +python-daemon = "*" +python-dateutil = ">=2.7.5,<3" +tenacity = ">=6.3.0,<7" +tornado = ">=5.0,<7" + +[package.extras] +prometheus = ["prometheus-client (==0.5.0)"] +toml = ["toml (<2.0.0)"] + +[[package]] +name = "markupsafe" +version = "2.1.1" +description = "Safely add untrusted strings to HTML/XML markup." 
+category = "main" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "netaddr" +version = "0.8.0" +description = "A network address manipulation library for Python" +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "networkx" +version = "2.8" +description = "Python package for creating and manipulating graphs and networks" +category = "main" +optional = false +python-versions = ">=3.8" + +[package.extras] +default = ["numpy (>=1.19)", "scipy (>=1.8)", "matplotlib (>=3.4)", "pandas (>=1.3)"] +doc = ["sphinx (>=4.5)", "pydata-sphinx-theme (>=0.8.1)", "sphinx-gallery (>=0.10)", "numpydoc (>=1.2)", "pillow (>=9.1)", "nb2plots (>=0.6)", "texext (>=0.6.6)"] +test = ["pytest (>=7.1)", "pytest-cov (>=3.0)", "codecov (>=2.1)"] +developer = ["pre-commit (>=2.18)", "mypy (>=0.942)"] +extra = ["lxml (>=4.6)", "pygraphviz (>=1.9)", "pydot (>=1.4.2)", "sympy (>=1.10)"] + +[[package]] +name = "packaging" +version = "21.3" +description = "Core utilities for Python packages" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" + +[[package]] +name = "pbr" +version = "5.8.1" +description = "Python Build Reasonableness" +category = "dev" +optional = false +python-versions = ">=2.6" + +[[package]] +name = "pluggy" +version = "1.0.0" +description = "plugin and hook calling mechanisms for python" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.extras] +testing = ["pytest", "pytest-benchmark"] +dev = ["pre-commit", "tox"] + +[[package]] +name = "py" +version = "1.11.0" +description = "library with cross-python path, ini-parsing, io, code, log facilities" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[[package]] +name = "pydot" +version = "1.4.2" +description = "Python interface to Graphviz's Dot" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, 
!=3.1.*, !=3.2.*, !=3.3.*" + +[package.dependencies] +pyparsing = ">=2.1.4" + +[[package]] +name = "pyparsing" +version = "3.0.8" +description = "pyparsing module - Classes and methods to define and execute parsing grammars" +category = "main" +optional = false +python-versions = ">=3.6.8" + +[package.extras] +diagrams = ["railroad-diagrams", "jinja2"] + +[[package]] +name = "pyreadline3" +version = "3.4.1" +description = "A python implementation of GNU readline." +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "pyrsistent" +version = "0.18.1" +description = "Persistent/Functional/Immutable data structures" +category = "main" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "pytest" +version = "7.1.2" +description = "pytest: simple powerful testing with Python" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} +attrs = ">=19.2.0" +colorama = {version = "*", markers = "sys_platform == \"win32\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +py = ">=1.8.2" +tomli = ">=1.0.0" + +[package.extras] +testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"] + +[[package]] +name = "pytest-mock" +version = "3.7.0" +description = "Thin-wrapper around the mock package for easier use with pytest" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +pytest = ">=5.0" + +[package.extras] +dev = ["pre-commit", "tox", "pytest-asyncio"] + +[[package]] +name = "python-daemon" +version = "2.3.0" +description = "Library to implement a well-behaved Unix daemon process." 
+category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +docutils = "*" +lockfile = ">=0.10" + +[package.extras] +test = ["coverage", "docutils", "testscenarios (>=0.4)", "testtools"] + +[[package]] +name = "python-dateutil" +version = "2.8.2" +description = "Extensions to the standard Python datetime module" +category = "main" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "pyyaml" +version = "6.0" +description = "YAML parser and emitter for Python" +category = "dev" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "requests" +version = "2.27.1" +description = "Python HTTP for Humans." +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = {version = ">=2.0.0,<2.1.0", markers = "python_version >= \"3\""} +idna = {version = ">=2.5,<4", markers = "python_version >= \"3\""} +urllib3 = ">=1.21.1,<1.27" + +[package.extras] +use_chardet_on_py3 = ["chardet (>=3.0.2,<5)"] +socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"] + +[[package]] +name = "s3transfer" +version = "0.5.2" +description = "An Amazon S3 Transfer Manager" +category = "main" +optional = false +python-versions = ">= 3.6" + +[package.dependencies] +botocore = ">=1.12.36,<2.0a.0" + +[package.extras] +crt = ["botocore[crt] (>=1.20.29,<2.0a.0)"] + +[[package]] +name = "sarif-om" +version = "1.0.4" +description = "Classes implementing the SARIF 2.1.0 object model." 
+category = "dev" +optional = false +python-versions = ">= 2.7" + +[package.dependencies] +attrs = "*" +pbr = "*" + +[[package]] +name = "simplejson" +version = "3.17.6" +description = "Simple, fast, extensible JSON encoder/decoder for Python" +category = "main" +optional = false +python-versions = ">=2.5, !=3.0.*, !=3.1.*, !=3.2.*" + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" + +[[package]] +name = "smmap" +version = "5.0.0" +description = "A pure Python implementation of a sliding window memory map manager" +category = "main" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "stopwatch.py" +version = "1.0.1" +description = "A simple stopwatch for python" +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "tenacity" +version = "6.3.1" +description = "Retry code until it succeeds" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +six = ">=1.9.0" + +[package.extras] +doc = ["reno", "sphinx", "tornado (>=4.5)"] + +[[package]] +name = "toml" +version = "0.10.2" +description = "Python Library for Tom's Obvious, Minimal Language" +category = "dev" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +category = "dev" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "tornado" +version = "6.1" +description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." +category = "main" +optional = false +python-versions = ">= 3.5" + +[[package]] +name = "urllib3" +version = "1.26.9" +description = "HTTP library with thread-safe connection pooling, file post, and more." 
+category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" + +[package.extras] +brotli = ["brotlicffi (>=0.8.0)", "brotli (>=1.0.9)", "brotlipy (>=0.6.0)"] +secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"] +socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] + +[[package]] +name = "websocket-client" +version = "1.3.2" +description = "WebSocket client for Python with low level API options" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.extras] +test = ["websockets"] +docs = ["Sphinx (>=3.4)", "sphinx-rtd-theme (>=0.5)"] +optional = ["python-socks", "wsaccel"] + +[[package]] +name = "zipp" +version = "3.8.0" +description = "Backport of pathlib-compatible object wrapper for zip files" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.extras] +docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)"] +testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)"] + +[metadata] +lock-version = "1.1" +python-versions = ">=3.8.0,<4.0" +content-hash = "b4b9d2debdba7583b5f600f60da500f1de2107130076d6bb1d0e68c78c792e0f" + +[metadata.files] +atomicwrites = [ + {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"}, + {file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"}, +] +attrs = [ + {file = "attrs-21.4.0-py2.py3-none-any.whl", hash = "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4"}, + {file = "attrs-21.4.0.tar.gz", hash = "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"}, +] +aws-sam-translator = [ + {file = "aws-sam-translator-1.45.0.tar.gz", hash = 
"sha256:bf321ab62aa1731d3e471fd55de6f5d1ab07dfc169cd254aa523dd9ad30246f9"}, + {file = "aws_sam_translator-1.45.0-py2-none-any.whl", hash = "sha256:cd4761c01902e5103e60202373275886e59edcc778edf18ca22d380059ed44e7"}, + {file = "aws_sam_translator-1.45.0-py3-none-any.whl", hash = "sha256:40a6dd5a0aba32c7b38b0f5c54470396acdcd75e4b64251b015abdf922a18b5f"}, +] +boto3 = [ + {file = "boto3-1.22.2-py3-none-any.whl", hash = "sha256:1d0c9cf30c5c93ce4628b22f7dff21f80efd104706967be2afb47ea93dcbceb8"}, + {file = "boto3-1.22.2.tar.gz", hash = "sha256:fb9a66722a15b0a8edb4f29d63f013877c4c528eff1225bb31dab723c3e5a9ed"}, +] +botocore = [ + {file = "botocore-1.25.2-py3-none-any.whl", hash = "sha256:dff4aaf03c2b8ee4a861405fd35c83b585a661a541d520f51e863a31bb18e0f8"}, + {file = "botocore-1.25.2.tar.gz", hash = "sha256:78363cd023c6123a65d90080e14a9fb625a9639d970ab03471c6fe95258ab8c0"}, +] +certifi = [ + {file = "certifi-2021.10.8-py2.py3-none-any.whl", hash = "sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569"}, + {file = "certifi-2021.10.8.tar.gz", hash = "sha256:78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872"}, +] +cfn-lint = [ + {file = "cfn-lint-0.59.0.tar.gz", hash = "sha256:2dab012912d5869506258f0d4bb15d8e7f0ac2117e75fa599b50764fd867dba2"}, + {file = "cfn_lint-0.59.0-py3-none-any.whl", hash = "sha256:e5e98712cb162ee70eedd0fd8eae8d45d6420d43502e6120ad768f00ff1eec05"}, +] +charset-normalizer = [ + {file = "charset-normalizer-2.0.12.tar.gz", hash = "sha256:2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597"}, + {file = "charset_normalizer-2.0.12-py3-none-any.whl", hash = "sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df"}, +] +click = [ + {file = "click-8.1.2-py3-none-any.whl", hash = "sha256:24e1a4a9ec5bf6299411369b208c1df2188d9eb8d916302fe6bf03faed227f1e"}, + {file = "click-8.1.2.tar.gz", hash = "sha256:479707fe14d9ec9a0757618b7a100a0ae4c4e236fac5b7f80ca68028141a1a72"}, +] +colorama = [ + {file = 
"colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, + {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, +] +docker = [ + {file = "docker-5.0.3-py2.py3-none-any.whl", hash = "sha256:7a79bb439e3df59d0a72621775d600bc8bc8b422d285824cb37103eab91d1ce0"}, + {file = "docker-5.0.3.tar.gz", hash = "sha256:d916a26b62970e7c2f554110ed6af04c7ccff8e9f81ad17d0d40c75637e227fb"}, +] +docutils = [ + {file = "docutils-0.18.1-py2.py3-none-any.whl", hash = "sha256:23010f129180089fbcd3bc08cfefccb3b890b0050e1ca00c867036e9d161b98c"}, + {file = "docutils-0.18.1.tar.gz", hash = "sha256:679987caf361a7539d76e584cbeddc311e3aee937877c87346f31debc63e9d06"}, +] +exasol-error-reporting-python = [] +exasol-integration-test-docker-environment = [] +exasol-script-languages-container-tool = [] +gitdb = [ + {file = "gitdb-4.0.9-py3-none-any.whl", hash = "sha256:8033ad4e853066ba6ca92050b9df2f89301b8fc8bf7e9324d412a63f8bf1a8fd"}, + {file = "gitdb-4.0.9.tar.gz", hash = "sha256:bac2fd45c0a1c9cf619e63a90d62bdc63892ef92387424b855792a6cabe789aa"}, +] +gitpython = [ + {file = "GitPython-3.1.27-py3-none-any.whl", hash = "sha256:5b68b000463593e05ff2b261acff0ff0972df8ab1b70d3cdbd41b546c8b8fc3d"}, + {file = "GitPython-3.1.27.tar.gz", hash = "sha256:1c885ce809e8ba2d88a29befeb385fcea06338d3640712b59ca623c220bb5704"}, +] +humanfriendly = [ + {file = "humanfriendly-10.0-py2.py3-none-any.whl", hash = "sha256:1697e1a8a8f550fd43c2865cd84542fc175a61dcb779b6fee18cf6b6ccba1477"}, + {file = "humanfriendly-10.0.tar.gz", hash = "sha256:6b0b831ce8f15f7300721aa49829fc4e83921a9a301cc7f606be6686a2288ddc"}, +] +idna = [ + {file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"}, + {file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"}, +] +importlib-metadata = [ + {file = 
"importlib_metadata-4.11.3-py3-none-any.whl", hash = "sha256:1208431ca90a8cca1a6b8af391bb53c1a2db74e5d1cef6ddced95d4b2062edc6"}, + {file = "importlib_metadata-4.11.3.tar.gz", hash = "sha256:ea4c597ebf37142f827b8f39299579e31685c31d3a438b59f469406afd0f2539"}, +] +iniconfig = [ + {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, + {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, +] +jinja2 = [ + {file = "Jinja2-3.1.1-py3-none-any.whl", hash = "sha256:539835f51a74a69f41b848a9645dbdc35b4f20a3b601e2d9a7e22947b15ff119"}, + {file = "Jinja2-3.1.1.tar.gz", hash = "sha256:640bed4bb501cbd17194b3cace1dc2126f5b619cf068a726b98192a0fde74ae9"}, +] +jmespath = [ + {file = "jmespath-1.0.0-py3-none-any.whl", hash = "sha256:e8dcd576ed616f14ec02eed0005c85973b5890083313860136657e24784e4c04"}, + {file = "jmespath-1.0.0.tar.gz", hash = "sha256:a490e280edd1f57d6de88636992d05b71e97d69a26a19f058ecf7d304474bf5e"}, +] +jschema-to-python = [ + {file = "jschema_to_python-1.2.3-py3-none-any.whl", hash = "sha256:8a703ca7604d42d74b2815eecf99a33359a8dccbb80806cce386d5e2dd992b05"}, + {file = "jschema_to_python-1.2.3.tar.gz", hash = "sha256:76ff14fe5d304708ccad1284e4b11f96a658949a31ee7faed9e0995279549b91"}, +] +jsonpatch = [ + {file = "jsonpatch-1.32-py2.py3-none-any.whl", hash = "sha256:26ac385719ac9f54df8a2f0827bb8253aa3ea8ab7b3368457bcdb8c14595a397"}, + {file = "jsonpatch-1.32.tar.gz", hash = "sha256:b6ddfe6c3db30d81a96aaeceb6baf916094ffa23d7dd5fa2c13e13f8b6e600c2"}, +] +jsonpickle = [ + {file = "jsonpickle-2.1.0-py2.py3-none-any.whl", hash = "sha256:1dee77ddc5d652dfdabc33d33cff9d7e131d428007007da4fd6f7071ae774b0f"}, + {file = "jsonpickle-2.1.0.tar.gz", hash = "sha256:84684cfc5338a534173c8dd69809e40f2865d0be1f8a2b7af8465e5b968dcfa9"}, +] +jsonpointer = [ + {file = "jsonpointer-2.3-py2.py3-none-any.whl", hash = 
"sha256:51801e558539b4e9cd268638c078c6c5746c9ac96bc38152d443400e4f3793e9"}, + {file = "jsonpointer-2.3.tar.gz", hash = "sha256:97cba51526c829282218feb99dab1b1e6bdf8efd1c43dc9d57be093c0d69c99a"}, +] +jsonschema = [ + {file = "jsonschema-3.2.0-py2.py3-none-any.whl", hash = "sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163"}, + {file = "jsonschema-3.2.0.tar.gz", hash = "sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a"}, +] +junit-xml = [ + {file = "junit_xml-1.9-py2.py3-none-any.whl", hash = "sha256:ec5ca1a55aefdd76d28fcc0b135251d156c7106fa979686a4b48d62b761b4732"}, +] +lockfile = [ + {file = "lockfile-0.12.2-py2.py3-none-any.whl", hash = "sha256:6c3cb24f344923d30b2785d5ad75182c8ea7ac1b6171b08657258ec7429d50fa"}, + {file = "lockfile-0.12.2.tar.gz", hash = "sha256:6aed02de03cba24efabcd600b30540140634fc06cfa603822d508d5361e9f799"}, +] +luigi = [ + {file = "luigi-3.0.3.tar.gz", hash = "sha256:7edc05a32bcff5aad28d7c7e3b15b761ef13fe2a495692602ebf0800eba66849"}, +] +markupsafe = [ + {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = 
"sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-win32.whl", hash = "sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-win32.whl", hash = "sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-win_amd64.whl", hash = 
"sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-win32.whl", hash = "sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f"}, + {file = 
"MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-win32.whl", hash = "sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247"}, + {file = "MarkupSafe-2.1.1.tar.gz", hash = "sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b"}, +] +netaddr = [ + {file = "netaddr-0.8.0-py2.py3-none-any.whl", hash = "sha256:9666d0232c32d2656e5e5f8d735f58fd6c7457ce52fc21c98d45f2af78f990ac"}, + {file = "netaddr-0.8.0.tar.gz", hash = "sha256:d6cc57c7a07b1d9d2e917aa8b36ae8ce61c35ba3fcd1b83ca31c5a0ee2b5a243"}, +] +networkx = [ + {file = "networkx-2.8-py3-none-any.whl", hash = "sha256:1a1e8fe052cc1b4e0339b998f6795099562a264a13a5af7a32cad45ab9d4e126"}, + {file = "networkx-2.8.tar.gz", hash = "sha256:4a52cf66aed221955420e11b3e2e05ca44196b4829aab9576d4d439212b0a14f"}, +] +packaging = [ + {file = "packaging-21.3-py3-none-any.whl", hash = 
"sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, + {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, +] +pbr = [ + {file = "pbr-5.8.1-py2.py3-none-any.whl", hash = "sha256:27108648368782d07bbf1cb468ad2e2eeef29086affd14087a6d04b7de8af4ec"}, + {file = "pbr-5.8.1.tar.gz", hash = "sha256:66bc5a34912f408bb3925bf21231cb6f59206267b7f63f3503ef865c1a292e25"}, +] +pluggy = [ + {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, + {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, +] +py = [ + {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, + {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, +] +pydot = [ + {file = "pydot-1.4.2-py2.py3-none-any.whl", hash = "sha256:66c98190c65b8d2e2382a441b4c0edfdb4f4c025ef9cb9874de478fb0793a451"}, + {file = "pydot-1.4.2.tar.gz", hash = "sha256:248081a39bcb56784deb018977e428605c1c758f10897a339fce1dd728ff007d"}, +] +pyparsing = [ + {file = "pyparsing-3.0.8-py3-none-any.whl", hash = "sha256:ef7b523f6356f763771559412c0d7134753f037822dad1b16945b7b846f7ad06"}, + {file = "pyparsing-3.0.8.tar.gz", hash = "sha256:7bf433498c016c4314268d95df76c81b842a4cb2b276fa3312cfb1e1d85f6954"}, +] +pyreadline3 = [ + {file = "pyreadline3-3.4.1-py3-none-any.whl", hash = "sha256:b0efb6516fd4fb07b45949053826a62fa4cb353db5be2bbb4a7aa1fdd1e345fb"}, + {file = "pyreadline3-3.4.1.tar.gz", hash = "sha256:6f3d1f7b8a31ba32b73917cefc1f28cc660562f39aea8646d30bd6eff21f7bae"}, +] +pyrsistent = [ + {file = "pyrsistent-0.18.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:df46c854f490f81210870e509818b729db4488e1f30f2a1ce1698b2295a878d1"}, + {file = 
"pyrsistent-0.18.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d45866ececf4a5fff8742c25722da6d4c9e180daa7b405dc0a2a2790d668c26"}, + {file = "pyrsistent-0.18.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4ed6784ceac462a7d6fcb7e9b663e93b9a6fb373b7f43594f9ff68875788e01e"}, + {file = "pyrsistent-0.18.1-cp310-cp310-win32.whl", hash = "sha256:e4f3149fd5eb9b285d6bfb54d2e5173f6a116fe19172686797c056672689daf6"}, + {file = "pyrsistent-0.18.1-cp310-cp310-win_amd64.whl", hash = "sha256:636ce2dc235046ccd3d8c56a7ad54e99d5c1cd0ef07d9ae847306c91d11b5fec"}, + {file = "pyrsistent-0.18.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e92a52c166426efbe0d1ec1332ee9119b6d32fc1f0bbfd55d5c1088070e7fc1b"}, + {file = "pyrsistent-0.18.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7a096646eab884bf8bed965bad63ea327e0d0c38989fc83c5ea7b8a87037bfc"}, + {file = "pyrsistent-0.18.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cdfd2c361b8a8e5d9499b9082b501c452ade8bbf42aef97ea04854f4a3f43b22"}, + {file = "pyrsistent-0.18.1-cp37-cp37m-win32.whl", hash = "sha256:7ec335fc998faa4febe75cc5268a9eac0478b3f681602c1f27befaf2a1abe1d8"}, + {file = "pyrsistent-0.18.1-cp37-cp37m-win_amd64.whl", hash = "sha256:6455fc599df93d1f60e1c5c4fe471499f08d190d57eca040c0ea182301321286"}, + {file = "pyrsistent-0.18.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:fd8da6d0124efa2f67d86fa70c851022f87c98e205f0594e1fae044e7119a5a6"}, + {file = "pyrsistent-0.18.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7bfe2388663fd18bd8ce7db2c91c7400bf3e1a9e8bd7d63bf7e77d39051b85ec"}, + {file = "pyrsistent-0.18.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e3e1fcc45199df76053026a51cc59ab2ea3fc7c094c6627e93b7b44cdae2c8c"}, + {file = 
"pyrsistent-0.18.1-cp38-cp38-win32.whl", hash = "sha256:b568f35ad53a7b07ed9b1b2bae09eb15cdd671a5ba5d2c66caee40dbf91c68ca"}, + {file = "pyrsistent-0.18.1-cp38-cp38-win_amd64.whl", hash = "sha256:d1b96547410f76078eaf66d282ddca2e4baae8964364abb4f4dcdde855cd123a"}, + {file = "pyrsistent-0.18.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f87cc2863ef33c709e237d4b5f4502a62a00fab450c9e020892e8e2ede5847f5"}, + {file = "pyrsistent-0.18.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bc66318fb7ee012071b2792024564973ecc80e9522842eb4e17743604b5e045"}, + {file = "pyrsistent-0.18.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:914474c9f1d93080338ace89cb2acee74f4f666fb0424896fcfb8d86058bf17c"}, + {file = "pyrsistent-0.18.1-cp39-cp39-win32.whl", hash = "sha256:1b34eedd6812bf4d33814fca1b66005805d3640ce53140ab8bbb1e2651b0d9bc"}, + {file = "pyrsistent-0.18.1-cp39-cp39-win_amd64.whl", hash = "sha256:e24a828f57e0c337c8d8bb9f6b12f09dfdf0273da25fda9e314f0b684b415a07"}, + {file = "pyrsistent-0.18.1.tar.gz", hash = "sha256:d4d61f8b993a7255ba714df3aca52700f8125289f84f704cf80916517c46eb96"}, +] +pytest = [ + {file = "pytest-7.1.2-py3-none-any.whl", hash = "sha256:13d0e3ccfc2b6e26be000cb6568c832ba67ba32e719443bfe725814d3c42433c"}, + {file = "pytest-7.1.2.tar.gz", hash = "sha256:a06a0425453864a270bc45e71f783330a7428defb4230fb5e6a731fde06ecd45"}, +] +pytest-mock = [ + {file = "pytest-mock-3.7.0.tar.gz", hash = "sha256:5112bd92cc9f186ee96e1a92efc84969ea494939c3aead39c50f421c4cc69534"}, + {file = "pytest_mock-3.7.0-py3-none-any.whl", hash = "sha256:6cff27cec936bf81dc5ee87f07132b807bcda51106b5ec4b90a04331cba76231"}, +] +python-daemon = [ + {file = "python-daemon-2.3.0.tar.gz", hash = "sha256:bda993f1623b1197699716d68d983bb580043cf2b8a66a01274d9b8297b0aeaf"}, + {file = "python_daemon-2.3.0-py2.py3-none-any.whl", hash = "sha256:191c7b67b8f7aac58849abf54e19fe1957ef7290c914210455673028ad454989"}, +] 
+python-dateutil = [ + {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, + {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, +] +pyyaml = [ + {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"}, + {file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"}, + {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc"}, + {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b"}, + {file = "PyYAML-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"}, + {file = "PyYAML-6.0-cp310-cp310-win32.whl", hash = "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513"}, + {file = "PyYAML-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a"}, + {file = "PyYAML-6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86"}, + {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f"}, + {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92"}, + {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = 
"sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4"}, + {file = "PyYAML-6.0-cp36-cp36m-win32.whl", hash = "sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293"}, + {file = "PyYAML-6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57"}, + {file = "PyYAML-6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c"}, + {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0"}, + {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4"}, + {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9"}, + {file = "PyYAML-6.0-cp37-cp37m-win32.whl", hash = "sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737"}, + {file = "PyYAML-6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d"}, + {file = "PyYAML-6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287"}, + {file = "PyYAML-6.0-cp38-cp38-win32.whl", hash = 
"sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78"}, + {file = "PyYAML-6.0-cp38-cp38-win_amd64.whl", hash = "sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07"}, + {file = "PyYAML-6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b"}, + {file = "PyYAML-6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174"}, + {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803"}, + {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3"}, + {file = "PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0"}, + {file = "PyYAML-6.0-cp39-cp39-win32.whl", hash = "sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb"}, + {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"}, + {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"}, +] +requests = [ + {file = "requests-2.27.1-py2.py3-none-any.whl", hash = "sha256:f22fa1e554c9ddfd16e6e41ac79759e17be9e492b3587efa038054674760e72d"}, + {file = "requests-2.27.1.tar.gz", hash = "sha256:68d7c56fd5a8999887728ef304a6d12edc7be74f1cfa47714fc8b414525c9a61"}, +] +s3transfer = [ + {file = "s3transfer-0.5.2-py3-none-any.whl", hash = "sha256:7a6f4c4d1fdb9a2b640244008e142cbc2cd3ae34b386584ef044dd0f27101971"}, + {file = "s3transfer-0.5.2.tar.gz", hash = "sha256:95c58c194ce657a5f4fb0b9e60a84968c808888aed628cd98ab8771fe1db98ed"}, +] +sarif-om = [ + {file = "sarif_om-1.0.4-py3-none-any.whl", hash = 
"sha256:539ef47a662329b1c8502388ad92457425e95dc0aaaf995fe46f4984c4771911"}, + {file = "sarif_om-1.0.4.tar.gz", hash = "sha256:cd5f416b3083e00d402a92e449a7ff67af46f11241073eea0461802a3b5aef98"}, +] +simplejson = [ + {file = "simplejson-3.17.6-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a89acae02b2975b1f8e4974cb8cdf9bf9f6c91162fb8dec50c259ce700f2770a"}, + {file = "simplejson-3.17.6-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:82ff356ff91be0ab2293fc6d8d262451eb6ac4fd999244c4b5f863e049ba219c"}, + {file = "simplejson-3.17.6-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:0de783e9c2b87bdd75b57efa2b6260c24b94605b5c9843517577d40ee0c3cc8a"}, + {file = "simplejson-3.17.6-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:d24a9e61df7a7787b338a58abfba975414937b609eb6b18973e25f573bc0eeeb"}, + {file = "simplejson-3.17.6-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:e8603e691580487f11306ecb066c76f1f4a8b54fb3bdb23fa40643a059509366"}, + {file = "simplejson-3.17.6-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:9b01e7b00654115965a206e3015f0166674ec1e575198a62a977355597c0bef5"}, + {file = "simplejson-3.17.6-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:37bc0cf0e5599f36072077e56e248f3336917ded1d33d2688624d8ed3cefd7d2"}, + {file = "simplejson-3.17.6-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:cf6e7d5fe2aeb54898df18db1baf479863eae581cce05410f61f6b4188c8ada1"}, + {file = "simplejson-3.17.6-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:bdfc54b4468ed4cd7415928cbe782f4d782722a81aeb0f81e2ddca9932632211"}, + {file = "simplejson-3.17.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:dd16302d39c4d6f4afde80edd0c97d4db643327d355a312762ccd9bd2ca515ed"}, + {file = "simplejson-3.17.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:deac4bdafa19bbb89edfb73b19f7f69a52d0b5bd3bb0c4ad404c1bbfd7b4b7fd"}, + {file = "simplejson-3.17.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a8bbdb166e2fb816e43ab034c865147edafe28e1b19c72433147789ac83e2dda"}, + 
{file = "simplejson-3.17.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a7854326920d41c3b5d468154318fe6ba4390cb2410480976787c640707e0180"}, + {file = "simplejson-3.17.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:04e31fa6ac8e326480703fb6ded1488bfa6f1d3f760d32e29dbf66d0838982ce"}, + {file = "simplejson-3.17.6-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f63600ec06982cdf480899026f4fda622776f5fabed9a869fdb32d72bc17e99a"}, + {file = "simplejson-3.17.6-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e03c3b8cc7883a54c3f34a6a135c4a17bc9088a33f36796acdb47162791b02f6"}, + {file = "simplejson-3.17.6-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a2d30d6c1652140181dc6861f564449ad71a45e4f165a6868c27d36745b65d40"}, + {file = "simplejson-3.17.6-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a1aa6e4cae8e3b8d5321be4f51c5ce77188faf7baa9fe1e78611f93a8eed2882"}, + {file = "simplejson-3.17.6-cp310-cp310-win32.whl", hash = "sha256:97202f939c3ff341fc3fa84d15db86156b1edc669424ba20b0a1fcd4a796a045"}, + {file = "simplejson-3.17.6-cp310-cp310-win_amd64.whl", hash = "sha256:80d3bc9944be1d73e5b1726c3bbfd2628d3d7fe2880711b1eb90b617b9b8ac70"}, + {file = "simplejson-3.17.6-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:9fa621b3c0c05d965882c920347b6593751b7ab20d8fa81e426f1735ca1a9fc7"}, + {file = "simplejson-3.17.6-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd2fb11922f58df8528adfca123f6a84748ad17d066007e7ac977720063556bd"}, + {file = "simplejson-3.17.6-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:724c1fe135aa437d5126138d977004d165a3b5e2ee98fc4eb3e7c0ef645e7e27"}, + {file = "simplejson-3.17.6-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = 
"sha256:4ff4ac6ff3aa8f814ac0f50bf218a2e1a434a17aafad4f0400a57a8cc62ef17f"}, + {file = "simplejson-3.17.6-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:67093a526e42981fdd954868062e56c9b67fdd7e712616cc3265ad0c210ecb51"}, + {file = "simplejson-3.17.6-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:5d6b4af7ad7e4ac515bc6e602e7b79e2204e25dbd10ab3aa2beef3c5a9cad2c7"}, + {file = "simplejson-3.17.6-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:1c9b1ed7ed282b36571638297525f8ef80f34b3e2d600a56f962c6044f24200d"}, + {file = "simplejson-3.17.6-cp36-cp36m-win32.whl", hash = "sha256:632ecbbd2228575e6860c9e49ea3cc5423764d5aa70b92acc4e74096fb434044"}, + {file = "simplejson-3.17.6-cp36-cp36m-win_amd64.whl", hash = "sha256:4c09868ddb86bf79b1feb4e3e7e4a35cd6e61ddb3452b54e20cf296313622566"}, + {file = "simplejson-3.17.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4b6bd8144f15a491c662f06814bd8eaa54b17f26095bb775411f39bacaf66837"}, + {file = "simplejson-3.17.6-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5decdc78849617917c206b01e9fc1d694fd58caa961be816cb37d3150d613d9a"}, + {file = "simplejson-3.17.6-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:521877c7bd060470806eb6335926e27453d740ac1958eaf0d8c00911bc5e1802"}, + {file = "simplejson-3.17.6-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:65b998193bd7b0c7ecdfffbc825d808eac66279313cb67d8892bb259c9d91494"}, + {file = "simplejson-3.17.6-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ac786f6cb7aa10d44e9641c7a7d16d7f6e095b138795cd43503769d4154e0dc2"}, + {file = "simplejson-3.17.6-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:3ff5b3464e1ce86a8de8c88e61d4836927d5595c2162cab22e96ff551b916e81"}, + {file = "simplejson-3.17.6-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:69bd56b1d257a91e763256d63606937ae4eb890b18a789b66951c00062afec33"}, + {file = 
"simplejson-3.17.6-cp37-cp37m-win32.whl", hash = "sha256:b81076552d34c27e5149a40187a8f7e2abb2d3185576a317aaf14aeeedad862a"}, + {file = "simplejson-3.17.6-cp37-cp37m-win_amd64.whl", hash = "sha256:07ecaafc1b1501f275bf5acdee34a4ad33c7c24ede287183ea77a02dc071e0c0"}, + {file = "simplejson-3.17.6-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:068670af975247acbb9fc3d5393293368cda17026db467bf7a51548ee8f17ee1"}, + {file = "simplejson-3.17.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4d1c135af0c72cb28dd259cf7ba218338f4dc027061262e46fe058b4e6a4c6a3"}, + {file = "simplejson-3.17.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:23fe704da910ff45e72543cbba152821685a889cf00fc58d5c8ee96a9bad5f94"}, + {file = "simplejson-3.17.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f444762fed1bc1fd75187ef14a20ed900c1fbb245d45be9e834b822a0223bc81"}, + {file = "simplejson-3.17.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:681eb4d37c9a9a6eb9b3245a5e89d7f7b2b9895590bb08a20aa598c1eb0a1d9d"}, + {file = "simplejson-3.17.6-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8e8607d8f6b4f9d46fee11447e334d6ab50e993dd4dbfb22f674616ce20907ab"}, + {file = "simplejson-3.17.6-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b10556817f09d46d420edd982dd0653940b90151d0576f09143a8e773459f6fe"}, + {file = "simplejson-3.17.6-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:e1ec8a9ee0987d4524ffd6299e778c16cc35fef6d1a2764e609f90962f0b293a"}, + {file = "simplejson-3.17.6-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0b4126cac7d69ac06ff22efd3e0b3328a4a70624fcd6bca4fc1b4e6d9e2e12bf"}, + {file = "simplejson-3.17.6-cp38-cp38-win32.whl", hash = "sha256:35a49ebef25f1ebdef54262e54ae80904d8692367a9f208cdfbc38dbf649e00a"}, + {file = "simplejson-3.17.6-cp38-cp38-win_amd64.whl", hash = 
"sha256:743cd768affaa508a21499f4858c5b824ffa2e1394ed94eb85caf47ac0732198"}, + {file = "simplejson-3.17.6-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:fb62d517a516128bacf08cb6a86ecd39fb06d08e7c4980251f5d5601d29989ba"}, + {file = "simplejson-3.17.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:12133863178a8080a3dccbf5cb2edfab0001bc41e5d6d2446af2a1131105adfe"}, + {file = "simplejson-3.17.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5540fba2d437edaf4aa4fbb80f43f42a8334206ad1ad3b27aef577fd989f20d9"}, + {file = "simplejson-3.17.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d74ee72b5071818a1a5dab47338e87f08a738cb938a3b0653b9e4d959ddd1fd9"}, + {file = "simplejson-3.17.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:28221620f4dcabdeac310846629b976e599a13f59abb21616356a85231ebd6ad"}, + {file = "simplejson-3.17.6-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b09bc62e5193e31d7f9876220fb429ec13a6a181a24d897b9edfbbdbcd678851"}, + {file = "simplejson-3.17.6-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7255a37ff50593c9b2f1afa8fafd6ef5763213c1ed5a9e2c6f5b9cc925ab979f"}, + {file = "simplejson-3.17.6-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:401d40969cee3df7bda211e57b903a534561b77a7ade0dd622a8d1a31eaa8ba7"}, + {file = "simplejson-3.17.6-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a649d0f66029c7eb67042b15374bd93a26aae202591d9afd71e111dd0006b198"}, + {file = "simplejson-3.17.6-cp39-cp39-win32.whl", hash = "sha256:522fad7be85de57430d6d287c4b635813932946ebf41b913fe7e880d154ade2e"}, + {file = "simplejson-3.17.6-cp39-cp39-win_amd64.whl", hash = "sha256:3fe87570168b2ae018391e2b43fbf66e8593a86feccb4b0500d134c998983ccc"}, + {file = "simplejson-3.17.6.tar.gz", hash = "sha256:cf98038d2abf63a1ada5730e91e84c642ba6c225b0198c3684151b1f80c5f8a6"}, +] +six = [ + {file = 
"six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] +smmap = [ + {file = "smmap-5.0.0-py3-none-any.whl", hash = "sha256:2aba19d6a040e78d8b09de5c57e96207b09ed71d8e55ce0959eeee6c8e190d94"}, + {file = "smmap-5.0.0.tar.gz", hash = "sha256:c840e62059cd3be204b0c9c9f74be2c09d5648eddd4580d9314c3ecde0b30936"}, +] +"stopwatch.py" = [ + {file = "stopwatch.py-1.0.1.tar.gz", hash = "sha256:861fee329c470017ea7d617326645ae2e6896cf9b7016a6b74a15b4287e4fd90"}, +] +tenacity = [ + {file = "tenacity-6.3.1-py2.py3-none-any.whl", hash = "sha256:baed357d9f35ec64264d8a4bbf004c35058fad8795c5b0d8a7dc77ecdcbb8f39"}, + {file = "tenacity-6.3.1.tar.gz", hash = "sha256:e14d191fb0a309b563904bbc336582efe2037de437e543b38da749769b544d7f"}, +] +toml = [ + {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, + {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, +] +tomli = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] +tornado = [ + {file = "tornado-6.1-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:d371e811d6b156d82aa5f9a4e08b58debf97c302a35714f6f45e35139c332e32"}, + {file = "tornado-6.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:0d321a39c36e5f2c4ff12b4ed58d41390460f798422c4504e09eb5678e09998c"}, + {file = "tornado-6.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9de9e5188a782be6b1ce866e8a51bc76a0fbaa0e16613823fc38e4fc2556ad05"}, + {file = "tornado-6.1-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:61b32d06ae8a036a6607805e6720ef00a3c98207038444ba7fd3d169cd998910"}, + {file = 
"tornado-6.1-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:3e63498f680547ed24d2c71e6497f24bca791aca2fe116dbc2bd0ac7f191691b"}, + {file = "tornado-6.1-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:6c77c9937962577a6a76917845d06af6ab9197702a42e1346d8ae2e76b5e3675"}, + {file = "tornado-6.1-cp35-cp35m-win32.whl", hash = "sha256:6286efab1ed6e74b7028327365cf7346b1d777d63ab30e21a0f4d5b275fc17d5"}, + {file = "tornado-6.1-cp35-cp35m-win_amd64.whl", hash = "sha256:fa2ba70284fa42c2a5ecb35e322e68823288a4251f9ba9cc77be04ae15eada68"}, + {file = "tornado-6.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:0a00ff4561e2929a2c37ce706cb8233b7907e0cdc22eab98888aca5dd3775feb"}, + {file = "tornado-6.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:748290bf9112b581c525e6e6d3820621ff020ed95af6f17fedef416b27ed564c"}, + {file = "tornado-6.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:e385b637ac3acaae8022e7e47dfa7b83d3620e432e3ecb9a3f7f58f150e50921"}, + {file = "tornado-6.1-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:25ad220258349a12ae87ede08a7b04aca51237721f63b1808d39bdb4b2164558"}, + {file = "tornado-6.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:65d98939f1a2e74b58839f8c4dab3b6b3c1ce84972ae712be02845e65391ac7c"}, + {file = "tornado-6.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:e519d64089b0876c7b467274468709dadf11e41d65f63bba207e04217f47c085"}, + {file = "tornado-6.1-cp36-cp36m-win32.whl", hash = "sha256:b87936fd2c317b6ee08a5741ea06b9d11a6074ef4cc42e031bc6403f82a32575"}, + {file = "tornado-6.1-cp36-cp36m-win_amd64.whl", hash = "sha256:cc0ee35043162abbf717b7df924597ade8e5395e7b66d18270116f8745ceb795"}, + {file = "tornado-6.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7250a3fa399f08ec9cb3f7b1b987955d17e044f1ade821b32e5f435130250d7f"}, + {file = "tornado-6.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:ed3ad863b1b40cd1d4bd21e7498329ccaece75db5a5bf58cd3c9f130843e7102"}, + {file = "tornado-6.1-cp37-cp37m-manylinux1_x86_64.whl", hash 
= "sha256:dcef026f608f678c118779cd6591c8af6e9b4155c44e0d1bc0c87c036fb8c8c4"}, + {file = "tornado-6.1-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:70dec29e8ac485dbf57481baee40781c63e381bebea080991893cd297742b8fd"}, + {file = "tornado-6.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:d3f7594930c423fd9f5d1a76bee85a2c36fd8b4b16921cae7e965f22575e9c01"}, + {file = "tornado-6.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:3447475585bae2e77ecb832fc0300c3695516a47d46cefa0528181a34c5b9d3d"}, + {file = "tornado-6.1-cp37-cp37m-win32.whl", hash = "sha256:e7229e60ac41a1202444497ddde70a48d33909e484f96eb0da9baf8dc68541df"}, + {file = "tornado-6.1-cp37-cp37m-win_amd64.whl", hash = "sha256:cb5ec8eead331e3bb4ce8066cf06d2dfef1bfb1b2a73082dfe8a161301b76e37"}, + {file = "tornado-6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:20241b3cb4f425e971cb0a8e4ffc9b0a861530ae3c52f2b0434e6c1b57e9fd95"}, + {file = "tornado-6.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:c77da1263aa361938476f04c4b6c8916001b90b2c2fdd92d8d535e1af48fba5a"}, + {file = "tornado-6.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:fba85b6cd9c39be262fcd23865652920832b61583de2a2ca907dbd8e8a8c81e5"}, + {file = "tornado-6.1-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:1e8225a1070cd8eec59a996c43229fe8f95689cb16e552d130b9793cb570a288"}, + {file = "tornado-6.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:d14d30e7f46a0476efb0deb5b61343b1526f73ebb5ed84f23dc794bdb88f9d9f"}, + {file = "tornado-6.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:8f959b26f2634a091bb42241c3ed8d3cedb506e7c27b8dd5c7b9f745318ddbb6"}, + {file = "tornado-6.1-cp38-cp38-win32.whl", hash = "sha256:34ca2dac9e4d7afb0bed4677512e36a52f09caa6fded70b4e3e1c89dbd92c326"}, + {file = "tornado-6.1-cp38-cp38-win_amd64.whl", hash = "sha256:6196a5c39286cc37c024cd78834fb9345e464525d8991c21e908cc046d1cc02c"}, + {file = "tornado-6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:f0ba29bafd8e7e22920567ce0d232c26d4d47c8b5cf4ed7b562b5db39fa199c5"}, + {file = "tornado-6.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:33892118b165401f291070100d6d09359ca74addda679b60390b09f8ef325ffe"}, + {file = "tornado-6.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:7da13da6f985aab7f6f28debab00c67ff9cbacd588e8477034c0652ac141feea"}, + {file = "tornado-6.1-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:e0791ac58d91ac58f694d8d2957884df8e4e2f6687cdf367ef7eb7497f79eaa2"}, + {file = "tornado-6.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:66324e4e1beede9ac79e60f88de548da58b1f8ab4b2f1354d8375774f997e6c0"}, + {file = "tornado-6.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:a48900ecea1cbb71b8c71c620dee15b62f85f7c14189bdeee54966fbd9a0c5bd"}, + {file = "tornado-6.1-cp39-cp39-win32.whl", hash = "sha256:d3d20ea5782ba63ed13bc2b8c291a053c8d807a8fa927d941bd718468f7b950c"}, + {file = "tornado-6.1-cp39-cp39-win_amd64.whl", hash = "sha256:548430be2740e327b3fe0201abe471f314741efcb0067ec4f2d7dcfb4825f3e4"}, + {file = "tornado-6.1.tar.gz", hash = "sha256:33c6e81d7bd55b468d2e793517c909b139960b6c790a60b7991b9b6b76fb9791"}, +] +urllib3 = [ + {file = "urllib3-1.26.9-py2.py3-none-any.whl", hash = "sha256:44ece4d53fb1706f667c9bd1c648f5469a2ec925fcf3a776667042d645472c14"}, + {file = "urllib3-1.26.9.tar.gz", hash = "sha256:aabaf16477806a5e1dd19aa41f8c2b7950dd3c746362d7e3223dbe6de6ac448e"}, +] +websocket-client = [ + {file = "websocket-client-1.3.2.tar.gz", hash = "sha256:50b21db0058f7a953d67cc0445be4b948d7fc196ecbeb8083d68d94628e4abf6"}, + {file = "websocket_client-1.3.2-py3-none-any.whl", hash = "sha256:722b171be00f2b90e1d4fb2f2b53146a536ca38db1da8ff49c972a4e1365d0ef"}, +] +zipp = [ + {file = "zipp-3.8.0-py3-none-any.whl", hash = "sha256:c4f6e5bbf48e74f7a38e7cc5b0480ff42b0ae5178957d564d18932525d5cf099"}, + {file = "zipp-3.8.0.tar.gz", hash = "sha256:56bf8aadb83c24db6c4b577e13de374ccfb67da2078beba1d037c17980bf43ad"}, +] diff --git a/pyproject.toml 
b/pyproject.toml new file mode 100644 index 0000000..3ffd0a0 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,37 @@ +[tool.poetry] +name = "exasol-script-languages-container-ci-setup" +version = "0.1.0" +description = "Manages AWS cloud CI build infrastructure." + +license = "MIT" + +authors = [ + "Thomas Uebensee " +] + +[tool.poetry.dependencies] +python = ">=3.8.0,<4.0" +click = "^8.0.3" +jinja2 = ">=3.0.0" +exasol_error_reporting_python = { git = "https://github.com/exasol/error-reporting-python.git", branch = "main" } +exasol_script_languages_container_tool = { url = "https://github.com/exasol/script-languages-container-tool/releases/download/0.12.0/exasol_script_languages_container_tool-0.12.0-py3-none-any.whl" } +boto3 = "^1.21.34" +jsonschema = "^3.2.0" + +[build-system] +requires = ["poetry_core>=1.0.0"] +build-backend = "poetry.core.masonry.api" + + +[tool.poetry.dev-dependencies] +toml = ">=0.10.2" +pytest = "^7.1.1" +pytest-mock = "^3.7.0" +cfn-lint = "^0.59.0" + +[tool.pytest.ini_options] +minversion = "6.0" + +testpaths = [ + "test" +] diff --git a/scripts/build/build_release.sh b/scripts/build/build_release.sh new file mode 100755 index 0000000..ef58c57 --- /dev/null +++ b/scripts/build/build_release.sh @@ -0,0 +1,12 @@ +#!/bin/bash +set -o errexit +set -o nounset +set -o pipefail + + +SCRIPT_DIR="$(dirname "$(readlink -f "${BASH_SOURCE[0]}")")" + +#shellcheck source=./scripts/build/setup_poetry_env.sh +source "$SCRIPT_DIR/setup_poetry_env.sh" "$@" + +poetry build diff --git a/scripts/build/check_git_status.sh b/scripts/build/check_git_status.sh new file mode 100755 index 0000000..7b8e873 --- /dev/null +++ b/scripts/build/check_git_status.sh @@ -0,0 +1,7 @@ +#!/bin/bash + +#Prints git status and returns > 0 if working tree is dirty! If working tree is clean, it returns 0. 
+ +git status --porcelain=v1 -uno +git diff --cached; git diff --cached --summary; +[ -z "$(git status --porcelain=v1 -uno 2>/dev/null)" ] diff --git a/scripts/build/check_release.py b/scripts/build/check_release.py new file mode 100644 index 0000000..9b3ebca --- /dev/null +++ b/scripts/build/check_release.py @@ -0,0 +1,53 @@ +import re +import sys +from pathlib import Path + +from git import Repo +import toml + + +def get_git_version(): + repo = Repo() + assert not repo.bare + tag_strings = sorted([t.name for t in repo.tags], reverse=True) + tag_strings = [t for t in tag_strings if t != "latest"] + + if len(tag_strings) == 0: + return "" + latest_tag = tag_strings[0].strip() + assert len(latest_tag) > 0 + return latest_tag + + +def get_poetry_version(): + parsed_toml = toml.load('pyproject.toml') + return parsed_toml["tool"]["poetry"]["version"].strip() + + +def get_change_log_version(): + # Path overloads __truediv__ + with open(Path(__file__).parent / ".." / ".." / "doc" / "changes" / "changelog.md") as changelog: + changelog_str = changelog.read() + # Search for the FIRST pattern like: "* [0.5.0](changes_0.5.0.md)" in the changelog file. + # Note that we encapsulate the [(0.5.0)] with parenthesis, which tells re to return the matching string as group + version_match = re.search(r"\* \[([0-9]+.[0-9]+.[0-9]+)]\(\S+\)", changelog_str) + return version_match.groups()[0] + + +if __name__ == '__main__': + poetry_version = get_poetry_version() + latest_tag = get_git_version() + changelog_version = get_change_log_version() + print(f'Changelog version: "{changelog_version}"', file=sys.stderr) + print(f'Current version: "{poetry_version}"', file=sys.stderr) + print(f'Latest git tag: "{latest_tag}"', file=sys.stderr) + + # We expect that the current version in pyproject.toml is alway greater than the latest tag. + # Thus we avoid creating a release without having the version number updated. 
+ if poetry_version == latest_tag: + raise ValueError("Poetry version needs to be updated!") + + if changelog_version != poetry_version: + raise ValueError("Poetry version differs from Changelog version!") + + print("Everything looks good", file=sys.stderr) diff --git a/scripts/build/check_release.sh b/scripts/build/check_release.sh new file mode 100755 index 0000000..350c721 --- /dev/null +++ b/scripts/build/check_release.sh @@ -0,0 +1,11 @@ +#!/bin/bash +set -o errexit +set -o nounset +set -o pipefail + +SCRIPT_DIR="$(dirname "$(readlink -f "${BASH_SOURCE[0]}")")" + +#shellcheck source=./scripts/build/setup_poetry_env.sh +source "$SCRIPT_DIR/setup_poetry_env.sh" "$@" + +poetry run python3 -u "$SCRIPT_DIR/check_release.py" diff --git a/scripts/build/setup_poetry_env.sh b/scripts/build/setup_poetry_env.sh new file mode 100644 index 0000000..2bc4478 --- /dev/null +++ b/scripts/build/setup_poetry_env.sh @@ -0,0 +1,10 @@ +#!/bin/bash +set -o errexit +set -o nounset +set -o pipefail + +PYTHON_VERSION=$1 + +PYTHON_BIN=$(command -v "$PYTHON_VERSION") +poetry env use "$PYTHON_BIN" +poetry install diff --git a/scripts/build/shellcheck.sh b/scripts/build/shellcheck.sh new file mode 100755 index 0000000..1abcdc7 --- /dev/null +++ b/scripts/build/shellcheck.sh @@ -0,0 +1,15 @@ +#!/bin/bash + +set -u + +interesting_paths=("scripts") + +SCRIPT_DIR="$(dirname "$(readlink -f "${BASH_SOURCE[0]}")")" +status=0 + +for path in "${interesting_paths[@]}"; do + find "$SCRIPT_DIR/../../$path" -name '*.sh' -type f -print0 | xargs -0 -n1 shellcheck -x + test $? 
-ne 0 && status=1 +done + +exit "$status" diff --git a/setup.py b/setup.py new file mode 100644 index 0000000..d2fa32b --- /dev/null +++ b/setup.py @@ -0,0 +1,41 @@ +# -*- coding: utf-8 -*- +from setuptools import setup + +packages = \ +['exasol_script_languages_container_ci_setup', + 'exasol_script_languages_container_ci_setup.cli', + 'exasol_script_languages_container_ci_setup.cli.commands', + 'exasol_script_languages_container_ci_setup.cli.options', + 'exasol_script_languages_container_ci_setup.lib'] + +package_data = \ +{'': ['*'], 'exasol_script_languages_container_ci_setup': ['templates/*']} + +install_requires = \ +['boto3>=1.21.34,<2.0.0', + 'click>=8.0.3,<9.0.0', + 'exasol_error_reporting_python @ ' + 'git+https://github.com/exasol/error-reporting-python.git@main', + 'exasol_script_languages_container_tool @ ' + 'https://github.com/exasol/script-languages-container-tool/releases/download/0.12.0/exasol_script_languages_container_tool-0.12.0-py3-none-any.whl', + 'jinja2>=3.0.0', + 'jsonschema>=3.2.0,<4.0.0'] + +setup_kwargs = { + 'name': 'exasol-script-languages-container-ci-setup', + 'version': '0.1.0', + 'description': 'Manages AWS cloud CI build infrastructure.', + 'long_description': None, + 'author': 'Thomas Uebensee', + 'author_email': 'ext.thomas.uebensee@exasol.com', + 'maintainer': None, + 'maintainer_email': None, + 'url': None, + 'packages': packages, + 'package_data': package_data, + 'install_requires': install_requires, + 'python_requires': '>=3.8.0,<4.0', +} + + +setup(**setup_kwargs) diff --git a/test/__init__.py b/test/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/test/cloudformation_validation.py b/test/cloudformation_validation.py new file mode 100644 index 0000000..110f0b5 --- /dev/null +++ b/test/cloudformation_validation.py @@ -0,0 +1,19 @@ +import subprocess + + +def validate_using_cfn_lint(tmp_path, cloudformation_yml): + """ + This test uses cfn-lint to validate the Cloudformation template. 
+ (See https://github.com/aws-cloudformation/cfn-lint) + """ + out_file = tmp_path / "slc_source_credential.yaml" + with open(out_file, "w") as f: + f.write(cloudformation_yml) + + completed_process = subprocess.run(["cfn-lint", str(out_file.absolute())], capture_output=True) + try: + completed_process.check_returncode() + except subprocess.CalledProcessError as e: + print(e.stdout) + raise e + diff --git a/test/test_deploy_ci.py b/test/test_deploy_ci.py new file mode 100644 index 0000000..40e4053 --- /dev/null +++ b/test/test_deploy_ci.py @@ -0,0 +1,39 @@ +from unittest.mock import MagicMock + +import pytest + +from exasol_script_languages_container_ci_setup.lib.aws_access import AwsAccess +from exasol_script_languages_container_ci_setup.lib.ci_build import run_deploy_ci_build, stack_name +from exasol_script_languages_container_ci_setup.lib.render_template import render_template +from test.cloudformation_validation import validate_using_cfn_lint + +PROJECT = "slc" +GH_URL = "https://github.com/slc" +DOCKERHUB_SECRET_ARN = "super_secret_arn" + + +@pytest.fixture +def ci_code_build_yml(): + return render_template("slc_code_build.yaml", project=PROJECT, + dockerhub_secret_arn=DOCKERHUB_SECRET_ARN, github_url=GH_URL) + + +def test_deploy_ci_upload_invoked(ci_code_build_yml): + """" + Test if function upload_cloudformation_stack() will be invoked + with expected values when we run run_deploy_ci_build() + """ + aws_access_mock = MagicMock() + aws_access_mock.read_dockerhub_secret_arn.return_value = DOCKERHUB_SECRET_ARN + run_deploy_ci_build(aws_access=aws_access_mock, project=PROJECT, + github_url=GH_URL) + aws_access_mock.upload_cloudformation_stack.assert_called_once_with(ci_code_build_yml, stack_name(PROJECT)) + + +def test_deploy_ci_template(ci_code_build_yml): + aws_access = AwsAccess(None) + aws_access.validate_cloudformation_template(ci_code_build_yml) + + +def test_deploy_ci_template_with_cnf_lint(tmp_path, ci_code_build_yml): + validate_using_cfn_lint(tmp_path, 
ci_code_build_yml) diff --git a/test/test_deploy_source_credentials.py b/test/test_deploy_source_credentials.py new file mode 100644 index 0000000..cfbe2d2 --- /dev/null +++ b/test/test_deploy_source_credentials.py @@ -0,0 +1,44 @@ +from unittest.mock import MagicMock + +import pytest + +from exasol_script_languages_container_ci_setup.lib.aws_access import AwsAccess +from exasol_script_languages_container_ci_setup.lib.render_template import render_template +from exasol_script_languages_container_ci_setup.lib.source_credentials import ( + run_deploy_source_credentials, + SOURCE_CREDENTIALS_STACK_NAME +) + +from test.cloudformation_validation import validate_using_cfn_lint + +SECRET_NAME = "test_secret" +SECRET_USER_KEY = "test_secret_user_key" +SECRET_TOKEN_KEY = "test_secret_token_key" + + +@pytest.fixture +def source_credentials_yml(): + return render_template("slc_source_credential.yaml", + secret_name=SECRET_NAME, + secret_user_key=SECRET_USER_KEY, + secret_token_key=SECRET_TOKEN_KEY) + + +def test_deploy_source_credentials_upload_invoked(source_credentials_yml): + """ + Test if function upload_cloudformation_stack() will be invoked with expected values + when we run run_deploy_source_credentials() + """ + aws_access_mock = MagicMock() + run_deploy_source_credentials(aws_access=aws_access_mock, secret_name=SECRET_NAME, + secret_user_key=SECRET_USER_KEY, secret_token_key=SECRET_TOKEN_KEY) + aws_access_mock.upload_cloudformation_stack.assert_called_once_with(source_credentials_yml, SOURCE_CREDENTIALS_STACK_NAME) + + +def test_deploy_source_credentials_template(source_credentials_yml): + aws_access = AwsAccess(aws_profile=None) + aws_access.validate_cloudformation_template(source_credentials_yml) + + +def test_deploy_source_credentials_template_with_cnf_lint(tmp_path, source_credentials_yml): + validate_using_cfn_lint(tmp_path, source_credentials_yml) diff --git a/test/test_generate_buildspec.py b/test/test_generate_buildspec.py new file mode 100644 index 
0000000..c2baeb5 --- /dev/null +++ b/test/test_generate_buildspec.py @@ -0,0 +1,136 @@ +import json + +import jsonschema +import pytest + +from exasol_script_languages_container_ci_setup.lib.render_template import render_template +from exasol_script_languages_container_ci_setup.lib.run_generate_buildspec import run_generate_buildspec, \ + get_config_file_parameter + +expected_result_root_buildspec = """ +version: 0.2 + +# ---- AUTOMATICALLY GENERATED FILE -------- +# ---- DO NOT EDIT MANUALLY, BUT USE PYTHON MODULE "script-languages-container-ci-setup" TO UPDATE --- + +batch: + fast-fail: false + build-graph: + - identifier: build_test_flavor + env: + variables: + FLAVOR: test-flavor + buildspec: {location}/build_buildspec.yaml + privileged-mode: true + type: BUILD_GENERAL1_MEDIUM +""" + + +def test_buildspec(tmp_path): + """ + Run run_generate_buildspec() for one flavor and compare result! + """ + root_path = tmp_path / "flavors" + test_flavor = root_path / "test-flavor" + test_flavor.mkdir(parents=True, exist_ok=False) + out_path = tmp_path / "out" + out_path.mkdir(parents=False, exist_ok=False) + + script_languages_ci_location = "http://slc-ci" + run_generate_buildspec((str(root_path),), str(out_path.absolute()), config_file=None) + + with open(out_path / "buildspec.yaml", "r") as res_file: + res = res_file.read() + + assert res.strip() == expected_result_root_buildspec.strip().format(location=str(out_path)) + + with open(out_path / "build_buildspec.yaml", "r") as res_file: + res = res_file.read() + + # For build_buildspec.yaml we re-use the template for testing + expected_result_build_buildspec = render_template("build_buildspec.yaml", + script_languages_ci_location=script_languages_ci_location, + config_file_parameter="") + assert res.strip() == expected_result_build_buildspec.strip(). 
\ + format(script_languages_ci_location=script_languages_ci_location) + + +def test_buildspec_with_valid_config_file(tmp_path): + """ + Run run_generate_buildspec() for one flavor with a valid config file and compare result! + """ + root_path = tmp_path / "flavors" + test_flavor = root_path / "test-flavor" + test_flavor.mkdir(parents=True, exist_ok=False) + out_path = tmp_path / "out" + out_path.mkdir(parents=False, exist_ok=False) + + a_folder = tmp_path / "a_folder" + a_folder.mkdir(parents=False, exist_ok=False) + + config_file_path = tmp_path / "build_config.json" + config = {"build_ignore": {"ignored_paths": [str(a_folder)]}} + with open(config_file_path, "w") as f: + json.dump(config, f) + + run_generate_buildspec((str(root_path),), str(out_path.absolute()), + config_file=str(config_file_path.absolute())) + + with open(out_path / "buildspec.yaml", "r") as res_file: + res = res_file.read() + + assert res.strip() == expected_result_root_buildspec.strip().format(location=str(out_path)) + + with open(out_path / "build_buildspec.yaml", "r") as res_file: + res = res_file.read() + + # For build_buildspec.yaml we re-use the template for testing + expected_result_build_buildspec = render_template("build_buildspec.yaml", + config_file_parameter= + get_config_file_parameter(config_file_path)) + assert res.strip() == expected_result_build_buildspec.strip() + + +def test_buildspec_with_invalid_config_file(tmp_path): + """ + Run run_generate_buildspec() for one flavor with an invalid config file and check for correct exception! 
+ """ + root_path = tmp_path / "flavors" + test_flavor = root_path / "test-flavor" + test_flavor.mkdir(parents=True, exist_ok=False) + out_path = tmp_path / "out" + out_path.mkdir(parents=False, exist_ok=False) + + config_file_path = tmp_path / "build_config.json" + # Incorrect config ('ignored_path' instead of 'ignored_paths') + config = {"build_ignore": {"ignored_path": ["a_folder"]}} + with open(config_file_path, "w") as f: + json.dump(config, f) + + with pytest.raises(jsonschema.exceptions.ValidationError): + run_generate_buildspec((str(root_path),), str(out_path.absolute()), + config_file=str(config_file_path.absolute())) + + +def test_buildspec_with_invalid_folder(tmp_path): + """ + Run run_generate_buildspec() for one flavor with a valid config file, but invalid content and check for correct exception! + """ + root_path = tmp_path / "flavors" + test_flavor = root_path / "test-flavor" + test_flavor.mkdir(parents=True, exist_ok=False) + out_path = tmp_path / "out" + out_path.mkdir(parents=False, exist_ok=False) + + config_file_path = tmp_path / "build_config.json" + + a_folder = tmp_path / "a_folder" + # Incorrect config (tmp_path/a_folder does not exists) + config = {"build_ignore": {"ignored_paths": [str(a_folder)]}} + with open(config_file_path, "w") as f: + json.dump(config, f) + + with pytest.raises(ValueError): + run_generate_buildspec((str(root_path),), str(out_path.absolute()), + config_file=str(config_file_path.absolute())) + diff --git a/user_guide/user_guide.md b/user_guide/user_guide.md index c265ac7..347e165 100644 --- a/user_guide/user_guide.md +++ b/user_guide/user_guide.md @@ -1 +1,61 @@ -t.b.d. 
\ No newline at end of file +## Requirements + +This package requires: +* Python (>=3.8) +* AWS CLI +* AWS profile + +## Installation + +You can install the latest wheel package from the [Github Release page](https://github.com/exasol/script-languages-container-ci-setup/releases): +` +pip install https://github.com/exasol/script-languages-container-ci-setup/releases/download/$RELEASE/exasol_script_languages_container_ci_setup-$RELEASE-py3-none-any.whl +` +(Replace $RELEASE with the actual release you are interested in) + +## Usage + +The following commands are available: +* `health` checks the current environment and setup of your AWS CLI installation +* `deploy-ci-build` deploys the AWS Cloudformation Stack which runs CodeBuild on the given AWS profile +* `deploy-source-credentials` deploys the AWS Cloudformation Stack for the source credentials +* `generate-buildspec` Generates the buildspec files for the given script language flavors. +* `validate-ci-build` validates the AWS Cloudformation Template for the CodeBuild stack on AWS with the given parameters +* `validate-source-credentials` validates the AWS Cloudformation Template for the source credentials stack on AWS with the given parameters. + +## Requirements on AWS + +The template expects to have 2 secrets stored on AWS' SecretManager for the respective AWS account: +* Dockerhub (having secret keys: `User` and `AccessToken`). Used to pull/push Docker images from Dockerhub during the CI jobs. +* A secret containing the GH username and personal-token. Used to register the webhook and interact with the Github repositories. + +## Deployments on AWS + +### Background + +In order to accelerate the CI builds of the script language container we want to use AWS CodeBuild batch build, which enables us to run certain steps in parallel. As the number of flavors will change over time, this requires the generation of the buildspec again and again in the future. 
To simplify this process we created this project which automates the generation. Also we expect to have multiple repositories of the script language container in the future, each having its own CodeBuild instance. With the automatic generation of the CodeBuild instances via AWS Cloudformation we can simplify this generation of new instances when we have new script language repositories. + +#### Split of CodeBuild Stack and Source Credentials Stack + +We have put the source credentials cloudformation specification in another file because AWS allows only one instance of SourceCredential per `ServerType` (GITHUB in our case). See https://thomasstep.com/blog/cloudformation-example-for-codebuild-with-a-webhook for more information. +This means the stack for source credentials needs to be deployed only once, and not per project. + +### Deployments + +There are currently 2 types of stacks to be deployed: +* The source credentials stack, named: `SLCSourceCredentials`. +* An arbitrary number of stacks for the CI builds, named `${PROJECT}` (${PROJECT} is the name of the project given as argument) + +#### SLCSourceCredentials + +This is one stack containing the credentials to interact with all script-languages repositories. +It contains only one entity of type `AWS::CodeBuild::SourceCredential`. + +#### CIBuild + +There will be one code build stack for each repository. Each stack contains +* roles for the CodeBuild, S3 artifact-Bucket and Batchbuild. +* An S3 Bucket for the artifacts of the build. The artifacts usually contain the logs of the `exaslct` runs. +* A policy for the S3 Bucket. +* The CodeBuild instance itself. It will be named `${PROJECT}CodeBuild`. (${PROJECT} is the name of the project given as argument) +