From 7d68063f980bfcef8e7769fbe484988576d5db6b Mon Sep 17 00:00:00 2001 From: Daniel Bernstein Date: Fri, 1 Sep 2023 15:39:12 -0700 Subject: [PATCH 01/30] Introduces the palace-quicksight script with three operations: export analysis, import template, publish dashboard from template Add input and output directories. --- .gitignore | 2 + README.md | 5 +- bin/palace-quicksight | 11 + core/__init__.py | 0 core/cli.py | 95 +++++++++ core/operation/__init__.py | 0 core/operation/baseoperation.py | 70 +++++++ core/operation/export_analysis_operation.py | 123 +++++++++++ core/operation/import_from_json_operation.py | 82 ++++++++ .../publish_dashboard_from_template.py | 104 +++++++++ core/util.py | 32 +++ poetry.lock | 197 ++++++++++++++++++ pyproject.toml | 20 ++ 13 files changed, 739 insertions(+), 2 deletions(-) create mode 100644 .gitignore create mode 100755 bin/palace-quicksight create mode 100644 core/__init__.py create mode 100644 core/cli.py create mode 100644 core/operation/__init__.py create mode 100644 core/operation/baseoperation.py create mode 100644 core/operation/export_analysis_operation.py create mode 100644 core/operation/import_from_json_operation.py create mode 100644 core/operation/publish_dashboard_from_template.py create mode 100644 core/util.py create mode 100644 poetry.lock create mode 100644 pyproject.toml diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..083d732 --- /dev/null +++ b/.gitignore @@ -0,0 +1,2 @@ +__pycache__ +.idea diff --git a/README.md b/README.md index 5e45a53..36275df 100644 --- a/README.md +++ b/README.md @@ -1,2 +1,3 @@ -# palace-quicksight -AWS QuickSight Resources for Palace +h1. Palace Quicksight Tools + +A suite of command line operations for exporting and importing quicksight dashboards from and to AWS accounts. diff --git a/bin/palace-quicksight b/bin/palace-quicksight new file mode 100755 index 0000000..188d18a --- /dev/null +++ b/bin/palace-quicksight @@ -0,0 +1,11 @@ +#!/usr/bin/env python +"""The entry point for Palace Quicksight Tools""" +import os +import sys + +from core.cli import cli +bin_dir = os.path.split(__file__)[0] +package_dir = os.path.join(bin_dir, "..") +sys.path.append(os.path.abspath(package_dir)) + +cli() \ No newline at end of file diff --git a/core/__init__.py b/core/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/core/cli.py b/core/cli.py new file mode 100644 index 0000000..2b2141c --- /dev/null +++ b/core/cli.py @@ -0,0 +1,95 @@ +import click + +import logging + +from core.operation.export_analysis_operation import ExportAnalysisOperation +from core.operation.import_from_json_operation import ImportFromJsonOperation +from core.operation.publish_dashboard_from_template import PublishDashboardFromTemplateOperation + +log = logging.getLogger("core.cli") + + +@click.group() +def cli(): + pass + + +@click.command() +@click.option('--aws-profile', required=True, help='The AWS account profile') +@click.option('--aws-account-id', required=True, help='The ID of the AWS account') +@click.option('--analysis-id', required=True, help='The ID of the Analysis to be exported') +@click.option('--output-dir', required=True, + help='The path to the output directory to which resources will be exported') +def export_analysis(aws_profile: str, aws_account_id: str, analysis_id: str, output_dir: str): + """ + Creates a template from the analysis and exports at and the dataset(s) to json. 
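+
+    A minimal example invocation (the profile, account, analysis, and output
+    values below are placeholders):
+
+        ./bin/palace-quicksight export-analysis --aws-profile <profile> \
+            --aws-account-id <account-id> --analysis-id <analysis-id> \
+            --output-dir <output-dir>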
+    """
+    click.echo(f"export_analysis")
+    click.echo(f"aws_profile = {aws_profile}")
+    click.echo(f"analysis_id = {analysis_id}")
+    click.echo(f"aws_account_id = {aws_account_id}")
+    click.echo(f"output_dir = {output_dir}")
+    ExportAnalysisOperation(aws_profile=aws_profile, aws_account_id=aws_account_id, analysis_id=analysis_id,
+                            output_dir=output_dir).execute()
+
+
+cli.add_command(export_analysis)
+
+
+@click.command()
+@click.option('--aws-profile', required=True, help='The AWS account profile')
+@click.option('--aws-account-id', required=True, help='The ID of the AWS account')
+@click.option('--template-name', required=True, help='The name of the template to be restored')
+@click.option('--data-source-arn', required=True,
+              help='The ARN of the data source you want to associate with the data sets')
+@click.option('--target-namespace', required=True,
+              help='The namespace you wish to target (e.g. tpp-prod, tpp-dev, tpp-staging).')
+@click.option('--input-dir', required=True,
+              help='The path to the input directory from which resources will be imported')
+def import_from_json(aws_profile: str, aws_account_id: str, template_name: str, data_source_arn: str,
+                     target_namespace: str, input_dir: str):
+    """
+    Import template and datasource files from json
+    """
+
+    click.echo(f"import_from_json")
+    click.echo(f"aws_profile = {aws_profile}")
+    click.echo(f"aws_account_id = {aws_account_id}")
+    click.echo(f"template_name = {template_name}")
+    click.echo(f"data_source_arn = {data_source_arn}")
+    click.echo(f"input_dir = {input_dir}")
+
+    ImportFromJsonOperation(aws_profile=aws_profile, aws_account_id=aws_account_id, template_name=template_name,
+                            target_namespace=target_namespace, data_source_arn=data_source_arn,
+                            input_dir=input_dir).execute()
+
+
+cli.add_command(import_from_json)
+
+
+@click.command()
+@click.option('--aws-profile', required=True, help='The AWS account profile')
+@click.option('--aws-account-id', required=True, help='The ID of the AWS account')
+@click.option('--template-id', required=True, help='The ID of the template to be restored')
+@click.option('--target-namespace', required=True,
+              help='The namespace you wish to target (e.g. 
tpp-prod, tpp-dev, tpp-staging).') +@click.option('--group-name', required=True, help='Name of the Quicksight User Group') +def publish_dashboard_from_template(aws_profile: str, aws_account_id: str, template_id: str, target_namespace: str, + group_name: str): + """ + Create/Update a dashboard from a template + """ + + click.echo(f"publish dashboard from template") + click.echo(f"aws_profile = {aws_profile}") + click.echo(f"aws_account_id = {aws_account_id}") + click.echo(f"template_id = {template_id}") + click.echo(f"group_name = {group_name}") + PublishDashboardFromTemplateOperation(aws_profile=aws_profile, + aws_account_id=aws_account_id, + template_id=template_id, + target_namespace=target_namespace, + group_name=group_name).execute() + + +cli.add_command(publish_dashboard_from_template) diff --git a/core/operation/__init__.py b/core/operation/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/core/operation/baseoperation.py b/core/operation/baseoperation.py new file mode 100644 index 0000000..3a2eb80 --- /dev/null +++ b/core/operation/baseoperation.py @@ -0,0 +1,70 @@ +import json +import os +from abc import abstractmethod + + +import boto3 +import logging + +ASSET_DIR = "assets" +TEMPLATE_DIR = os.path.join(ASSET_DIR, "templates") +DATA_SET_DIR = os.path.join(ASSET_DIR, "data-sets") + + +class BaseOperation: + + """ + A base class for AWS based operations. + """ + + def __init__(self, aws_profile: str, aws_account_id: str): + self._aws_profile = aws_profile + self._aws_account_id = aws_account_id + self._qs_client = self._create_client() + self._log = logging.getLogger(self.__class__.__name__) + + def _create_client(self): + boto3.setup_default_session(profile_name=self._aws_profile) + return boto3.client('quicksight') + + @abstractmethod + def execute(self): + pass + + def _create_or_update_template(self, template_data: dict) -> [str, str]: + """ + Creates new or updates existing template. 
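+        Issues create_template first and falls back to update_template when
+        QuickSight reports that the template already exists.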
+ :param template_data: + :return: Template ARN, Template Version ARN, and the Template ID + """ + try: + response = self._qs_client.create_template(**template_data) + except self._qs_client.exceptions.ResourceExistsException as e: + response = self._qs_client.update_template(**template_data) + httpStatus = response['ResponseMetadata']['HTTPStatusCode'] + if httpStatus != 202: + self._log.error(f"Unexpected response from create_template request: {httpStatus} ") + raise Exception( + f"Unexpected response from trying to create/update template : {json.dumps(response, indent=4)} ") + else: + return response["Arn"], response["VersionArn"], response["TemplateId"] + + def _create_or_update_template_from_template_definition(self, template_definition: dict): + template_definition["AwsAccountId"] = self._aws_account_id + return self._create_or_update_template(template_data=template_definition) + + def _resolve_data_set_id_from_placeholder(self, namespace:str, placeholder:str) -> str: + return namespace + "-" + placeholder + + def _get_template_definition(self, template_id): + return self._qs_client.describe_template_definition(AwsAccountId=self._aws_account_id, TemplateId=template_id, + AliasName="$LATEST") + + def _describe_data_set(self, data_set_id): + response = self._qs_client.describe_data_set(AwsAccountId=self._aws_account_id, DataSetId=data_set_id) + return response['DataSet'] + + def _resolve_path(self, *paths): + return os.path.join(*paths) + + diff --git a/core/operation/export_analysis_operation.py b/core/operation/export_analysis_operation.py new file mode 100644 index 0000000..961f83b --- /dev/null +++ b/core/operation/export_analysis_operation.py @@ -0,0 +1,123 @@ +import json +import os +from typing import List + +from core.operation.baseoperation import BaseOperation, TEMPLATE_DIR, DATA_SET_DIR +from core.util import retry, recursively_replace_value + + +class ExportAnalysisOperation(BaseOperation): + """ + Exports a Quicksight Analysis and all it's dependencies to json files on disk + """ + + def __init__(self, analysis_id: str, output_dir: str, *args, **kwargs): + self._analysis_id = analysis_id + self._output_dir = output_dir + super(ExportAnalysisOperation, self).__init__(*args, **kwargs) + + def execute(self): + + os.makedirs(self._resolve_path(self._output_dir, TEMPLATE_DIR)) + os.makedirs(self._resolve_path(self._output_dir, DATA_SET_DIR)) + + # retrieve description + analysis_description = self._qs_client.describe_analysis(AwsAccountId=self._aws_account_id, + AnalysisId=self._analysis_id) + # check that analysis exists + https_status = analysis_description['ResponseMetadata']['HTTPStatusCode'] + + if https_status != 200: + self._log.error(f"Unexpected response from describe_analysis request: {https_status} ") + return + + # retrieve definition + analysis_definition = self._qs_client.describe_analysis_definition(AwsAccountId=self._aws_account_id, + AnalysisId=self._analysis_id) + + # extract DataSet references + analysis = analysis_description["Analysis"] + data_set_identifier_declarations = analysis_definition["Definition"]["DataSetIdentifierDeclarations"] + + data_set_references = [] + for did in data_set_identifier_declarations: + data_set_references.append({ + 'DataSetPlaceholder': did["Identifier"], + 'DataSetArn': did["DataSetArn"], + }) + + # create a template from the analysis + arn, version_arn, template_id = \ + self._create_or_update_template_from_analysis(analysis=analysis, + data_set_references=data_set_references) + + def verify_success() -> bool: + 
template_definition = self._get_template_definition(template_id=template_id) + return "SUCCESSFUL" in template_definition["ResourceStatus"] + + retry(verify_success) + + # get the newly created template definition + template_definition = self._get_template_definition(template_id=template_id) + self._log.info(f"Writing template definition response to disk") + files_to_update = [] + map_to_save = {} + # retain only the fields we will need to restore the state. + for i in ["Name", "Definition", "TemplateId"]: + map_to_save[i] = template_definition[i] + + # save the template as json file + definition_json_str = json.dumps(map_to_save, indent=4) + template_file = self._resolve_path(self._output_dir, TEMPLATE_DIR, template_definition["Name"] + ".json") + with open(template_file, "w") as template_file: + template_file.write(definition_json_str) + + files_to_update.append(template_file) + + # for each dataset declaration identifiers + for di in data_set_identifier_declarations: + # save to json file + ds_file = self._save_dataset_to_file(di=di) + files_to_update.append(ds_file) + + def _create_or_update_template_from_analysis(self, analysis, data_set_references: List): + template_name = analysis["Name"] + params = { + 'AwsAccountId': self._aws_account_id, + 'TemplateId': template_name + "-template", + 'Name': analysis["Name"], + 'SourceEntity': { + 'SourceAnalysis': { + 'Arn': analysis['Arn'], + 'DataSetReferences': data_set_references, + }, + } + } + return self._create_or_update_template(template_data=params) + + def _save_dataset_to_file(self, di) -> str: + """ + + :param di: dataset map + :return: The path of the dataset file + """ + identifier = di['Identifier'] + arn = di['DataSetArn'] + dataset_id = arn.split("dataset/", 1)[1] + ds_def_elements_to_save = self._describe_data_set(dataset_id) + # remove the following fields from the response before saving it. 
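+        # (these values are assigned by QuickSight and will be regenerated on import)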
+ for i in ["Arn", "DataSetId", "CreatedTime", "LastUpdatedTime"]: + ds_def_elements_to_save.pop(i) + + # align the data set name with the identifier + ds_def_elements_to_save["Name"] = identifier + # remove the datasource arn since this will need to be overridden + recursively_replace_value(ds_def_elements_to_save, "DataSourceArn", "") + # save what is left to disk + ds_def_str = json.dumps(ds_def_elements_to_save, indent=4) + dataset_file = self._resolve_path(self._output_dir, DATA_SET_DIR, identifier + ".json") + + with open(dataset_file, "w") as dataset_file: + dataset_file.write(ds_def_str) + + return dataset_file diff --git a/core/operation/import_from_json_operation.py b/core/operation/import_from_json_operation.py new file mode 100644 index 0000000..042686f --- /dev/null +++ b/core/operation/import_from_json_operation.py @@ -0,0 +1,82 @@ +import json + +from core.operation.baseoperation import BaseOperation, TEMPLATE_DIR, DATA_SET_DIR +from core.util import recursively_replace_value + + +class ImportFromJsonOperation(BaseOperation): + """ + Exports a Quicksight Analysis and all it's dependencies to json files on disk + """ + + def __init__(self, template_name: str, target_namespace: str, data_source_arn: str, input_dir: str, *args, + **kwargs): + self._template_name = template_name + self._target_namespace = target_namespace + self._data_source_arn = data_source_arn + self._intput_dir = input_dir + super(ImportFromJsonOperation, self).__init__(*args, **kwargs) + + def execute(self): + # Read template file into dictionary + template_data = None + template_file = self._resolve_path(self._intput_dir, TEMPLATE_DIR, self._template_name + ".json") + with open(template_file, "r") as template_file: + template_data = json.loads(template_file.read()) + + # create namespace if not exists + # try: + # self._qs_client.create_namespace(AwsAccountId=self._aws_account_id, Namespace=self._target_namespace, + # IdentityStore="QUICKSIGHT") + # except self._qs_client.exceptions.ConflictException as e: + # self._log.info(f"Namespace {self._target_namespace} already exists: ignoring.") + # + # namespace = self._qs_client.describe_namespace(AwsAccountId=self._aws_account_id, + # Namespace=self._target_namespace) + + # create name template in namespace + template_data["Name"] = self._target_namespace + "-" + self._template_name + template_data["TemplateId"] = template_data["Name"] + arn, version_arn, template_id = self._create_or_update_template_from_template_definition( + template_definition=template_data) + + # for each data set id associated with the template + dataset_configurations = template_data["Definition"]["DataSetConfigurations"] + for di in dataset_configurations: + # Read data set into dictionary + dataset = None + placeholder = di["Placeholder"] + dataset_filename = self._resolve_path(self._intput_dir, DATA_SET_DIR, placeholder + ".json") + with open(dataset_filename, "r") as dataset_file: + dataset = json.loads(dataset_file.read()) + + # replace the blank datasource arn value in the data set dictionaries + recursively_replace_value(dataset, "DataSourceArn", self._data_source_arn) + # Remove fields that are not allowed + for i in ['OutputColumns', 'ConsumedSpiceCapacityInBytes']: + dataset.pop(i) + + # Add required fields + dataset["AwsAccountId"] = self._aws_account_id + dataset["DataSetId"] = self._resolve_data_set_id_from_placeholder(placeholder=placeholder, + namespace=self._target_namespace) + dataset["Name"] = dataset["Name"] + arn, data_set_id = 
self._create_or_update_data_set(dataset_definition=dataset) + + def _create_or_update_data_set(self, dataset_definition: dict): + """ + Create new or updates existing DataSet + :param dataset_definition: + :return: DataSet ARN and DataSet Id + """ + try: + response = self._qs_client.create_data_set(**dataset_definition) + except self._qs_client.exceptions.ResourceExistsException as e: + response = self._qs_client.update_data_set(**dataset_definition) + httpStatus = response['ResponseMetadata']['HTTPStatusCode'] + if httpStatus != 201 and httpStatus != 200: + self._log.error(f"Unexpected response from create_dataset request: {httpStatus} ") + raise Exception( + f"Unexpected response from trying to create/update dataset : {json.dumps(response, indent=4)} ") + else: + return response["Arn"], response["DataSetId"] diff --git a/core/operation/publish_dashboard_from_template.py b/core/operation/publish_dashboard_from_template.py new file mode 100644 index 0000000..33a2e71 --- /dev/null +++ b/core/operation/publish_dashboard_from_template.py @@ -0,0 +1,104 @@ +import json +import time + +from core.operation.baseoperation import BaseOperation +from core.util import recursively_replace_value + + +class PublishDashboardFromTemplateOperation(BaseOperation): + """ + Publishes Dashboard based on template + """ + + def __init__(self, template_id: str, target_namespace: str, group_name, *args, **kwargs): + self._template_id = template_id + self._target_namespace = target_namespace + self._group_name = group_name + super(PublishDashboardFromTemplateOperation, self).__init__(*args, **kwargs) + + def execute(self): + # get the template definition + template_def = self._get_template_definition(template_id=self._template_id)["Definition"] + + desc_template_params = {"AwsAccountId": self._aws_account_id, "TemplateId": self._template_id} + template = self._qs_client.describe_template(**desc_template_params)["Template"] + + namespace_params = {"AwsAccountId": self._aws_account_id, "Namespace": "default"} + namespace_arn = self._qs_client.describe_namespace(**namespace_params)["Namespace"]["Arn"] + + # extract the data source placeholders + dashboard_id = self._template_id + parameters = { + "AwsAccountId": self._aws_account_id, + "Name": dashboard_id, + "DashboardId": dashboard_id, + "SourceEntity": { + "SourceTemplate": { + "DataSetReferences": [], + "Arn": template["Arn"] + } + }, + } + + ds_references = parameters["SourceEntity"]["SourceTemplate"]["DataSetReferences"] + + # for each data set config + for dsr in template_def["DataSetConfigurations"]: + # resolve the dataset arn + placeholder = dsr["Placeholder"] + data_set_id = self._resolve_data_set_id_from_placeholder(placeholder=placeholder, + namespace=self._target_namespace) + data_set = self._describe_data_set(data_set_id=data_set_id) + arn = data_set["Arn"] + # associate arn with placeholder key and add to references array + ds_references.append({ + "DataSetPlaceholder": placeholder, + "DataSetArn": arn, + }) + + # publish dashboard + dashboard_arn, dashboard_id = self._create_or_update_dashboard(dashboard_params=parameters) + + # pause for a moment to allow the updates to be processed. 
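+        # (a fixed delay is a simplification; polling the dashboard status, as
+        # retry() does for templates during export, would be more robust)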
+        time.sleep(3)
+
+        # Grant permissions
+        # resolve readers group
+        readers_group_arn = self._qs_client.describe_group(AwsAccountId=self._aws_account_id, Namespace="default", GroupName=self._group_name)["Group"]["Arn"]
+
+
+        qs_actions = ["quicksight:DescribeDashboard",
+                      "quicksight:ListDashboardVersions",
+                      "quicksight:QueryDashboard"]
+        permissions_params = {
+            "AwsAccountId": self._aws_account_id,
+            "DashboardId": self._template_id,
+            "GrantPermissions": [{
+                "Actions": qs_actions,
+                "Principal": namespace_arn,
+            },
+                {
+                    "Actions": qs_actions,
+                    "Principal": readers_group_arn,
+                }],
+        }
+
+        response = self._qs_client.update_dashboard_permissions(**permissions_params)
+
+    def _create_or_update_dashboard(self, dashboard_params: dict) -> [str, str]:
+        """
+        Creates a new or updates an existing dashboard.
+        :param dashboard_params:
+        :return: Dashboard ARN, Dashboard ID
+        """
+        try:
+            response = self._qs_client.create_dashboard(**dashboard_params)
+        except self._qs_client.exceptions.ResourceExistsException as e:
+            response = self._qs_client.update_dashboard(**dashboard_params)
+        httpStatus = response['ResponseMetadata']['HTTPStatusCode']
+        if httpStatus != 202 and httpStatus != 200:
+            self._log.error(f"Unexpected response from create_dashboard request: {httpStatus} ")
+            raise Exception(
+                f"Unexpected response from trying to create/update dashboard : {json.dumps(response, indent=4)} ")
+        else:
+            return response["Arn"], response["DashboardId"]
diff --git a/core/util.py b/core/util.py
new file mode 100644
index 0000000..0a51d68
--- /dev/null
+++ b/core/util.py
@@ -0,0 +1,32 @@
+from time import sleep
+
+
+def retry(func) -> None:
+    attempts = 5
+    attempt = 0
+    while True:
+        if func():
+            break
+        else:
+            attempt += 1
+            if attempt >= attempts:
+                raise Exception(f"Operation failed after {attempts} attempts.")
+            sleep(1)
+            continue
+
+
+def recursively_replace_value(mydict: dict, key: str, val: str):
+    """
+    Recursively searches mydict for key and replaces its value (if any) with val
+    :param mydict:
+    :param key:
+    :param val:
+    :return:
+    """
+    if key in mydict:
+        mydict[key] = val
+
+    for key2 in mydict:
+        value = mydict[key2]
+        if isinstance(value, dict):
+            recursively_replace_value(value, key, val)
\ No newline at end of file
diff --git a/poetry.lock b/poetry.lock
new file mode 100644
index 0000000..71c0492
--- /dev/null
+++ b/poetry.lock
@@ -0,0 +1,197 @@
+# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand.
+
+[[package]]
+name = "boto3"
+version = "1.28.9"
+description = "The AWS SDK for Python"
+optional = false
+python-versions = ">= 3.7"
+files = [
+    {file = "boto3-1.28.9-py3-none-any.whl", hash = "sha256:01f078047eb4d238c6b9c6cc623f2af33b4ae67980c5326691e35cb5493ff6c7"},
+    {file = "boto3-1.28.9.tar.gz", hash = "sha256:4cc0c6005be910e52077227e670930ab55a41ba86cdb6d1c052571d08cd4d32c"},
+]
+
+[package.dependencies]
+botocore = ">=1.31.9,<1.32.0"
+jmespath = ">=0.7.1,<2.0.0"
+s3transfer = ">=0.6.0,<0.7.0"
+
+[package.extras]
+crt = ["botocore[crt] (>=1.21.0,<2.0a0)"]
+
+[[package]]
+name = "botocore"
+version = "1.31.9"
+description = "Low-level, data-driven core of boto 3."
+optional = false +python-versions = ">= 3.7" +files = [ + {file = "botocore-1.31.9-py3-none-any.whl", hash = "sha256:e56ccd3536a90094ea5b176b5dd33bfe4f049efdf71af468ea1661bd424c787d"}, + {file = "botocore-1.31.9.tar.gz", hash = "sha256:bd849d3ac95f1781385ed831d753a04a3ec870a59d6598175aaedd71dc2baf5f"}, +] + +[package.dependencies] +jmespath = ">=0.7.1,<2.0.0" +python-dateutil = ">=2.1,<3.0.0" +urllib3 = ">=1.25.4,<1.27" + +[package.extras] +crt = ["awscrt (==0.16.26)"] + +[[package]] +name = "click" +version = "8.1.6" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.7" +files = [ + {file = "click-8.1.6-py3-none-any.whl", hash = "sha256:fa244bb30b3b5ee2cae3da8f55c9e5e0c0e86093306301fb418eb9dc40fbded5"}, + {file = "click-8.1.6.tar.gz", hash = "sha256:48ee849951919527a045bfe3bf7baa8a959c423134e1a5b98c05c20ba75a1cbd"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "fastcore" +version = "1.5.29" +description = "Python supercharged for fastai development" +optional = false +python-versions = ">=3.7" +files = [ + {file = "fastcore-1.5.29-py3-none-any.whl", hash = "sha256:a7d7e89faf968f2d8584df2deca344c3974f6cf476e1299cd3c067d8fa7440e9"}, + {file = "fastcore-1.5.29.tar.gz", hash = "sha256:f1a2eb04eb7933f3f9eb4064852817df44dc96e20fab5658c14c035815269a3f"}, +] + +[package.dependencies] +packaging = "*" +pip = "*" + +[package.extras] +dev = ["jupyterlab", "matplotlib", "nbdev (>=0.2.39)", "numpy", "pandas", "pillow", "torch"] + +[[package]] +name = "ghapi" +version = "1.0.4" +description = "A python client for the GitHub API" +optional = false +python-versions = ">=3.7" +files = [ + {file = "ghapi-1.0.4-py3-none-any.whl", hash = "sha256:cb5c7008a89c270157adbaf5b2fd6951e9d9fc76131b9bec16118a558a6a4c04"}, + {file = "ghapi-1.0.4.tar.gz", hash = "sha256:9e7632c762d6f9c288e3b046b2d58c2f7992dda7c925683df435440912b10625"}, +] + +[package.dependencies] +fastcore = ">=1.5.4" +packaging = "*" +pip = "*" + +[package.extras] +dev = ["jsonref", "matplotlib"] + +[[package]] +name = "jmespath" +version = "1.0.1" +description = "JSON Matching Expressions" +optional = false +python-versions = ">=3.7" +files = [ + {file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"}, + {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, +] + +[[package]] +name = "packaging" +version = "23.1" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-23.1-py3-none-any.whl", hash = "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61"}, + {file = "packaging-23.1.tar.gz", hash = "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f"}, +] + +[[package]] +name = "pip" +version = "23.2" +description = "The PyPA recommended tool for installing Python packages." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "pip-23.2-py3-none-any.whl", hash = "sha256:78e5353a9dda374b462f2054f83a7b63f3f065c98236a68361845c1b0ee7e35f"}, + {file = "pip-23.2.tar.gz", hash = "sha256:a160a170f3331d9ca1a0247eb1cd79c758879f1f81158f9cd05bbb5df80bea5c"}, +] + +[[package]] +name = "python-dateutil" +version = "2.8.2" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, + {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "s3transfer" +version = "0.6.1" +description = "An Amazon S3 Transfer Manager" +optional = false +python-versions = ">= 3.7" +files = [ + {file = "s3transfer-0.6.1-py3-none-any.whl", hash = "sha256:3c0da2d074bf35d6870ef157158641178a4204a6e689e82546083e31e0311346"}, + {file = "s3transfer-0.6.1.tar.gz", hash = "sha256:640bb492711f4c0c0905e1f62b6aaeb771881935ad27884852411f8e9cacbca9"}, +] + +[package.dependencies] +botocore = ">=1.12.36,<2.0a.0" + +[package.extras] +crt = ["botocore[crt] (>=1.20.29,<2.0a.0)"] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "urllib3" +version = "1.26.16" +description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +files = [ + {file = "urllib3-1.26.16-py2.py3-none-any.whl", hash = "sha256:8d36afa7616d8ab714608411b4a3b13e58f463aee519024578e062e141dce20f"}, + {file = "urllib3-1.26.16.tar.gz", hash = "sha256:8f135f6502756bde6b2a9b28989df5fbe87c9970cecaa69041edcce7f0589b14"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] +secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] +socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] + +[metadata] +lock-version = "2.0" +python-versions = "^3.10" +content-hash = "c8dcbdc818c56d1329f0689d39c87a561865bc26553e31050b079b45f81e3176" diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..7f421f2 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,20 @@ +[tool.poetry] +name = "palace-quicksight" +version = "0.1.0" +description = "" +authors = ["Daniel Bernstein "] +readme = "README.md" +packages = [{include = "main.py"}] +repository = "https://github.com/dbernstein/palace-quicksight" + +[tool.poetry.dependencies] +python = "^3.10" +boto3 = "^1.18" +botocore = "^1.21" +click = "^8.1.3" +ghapi = "^1.0.4" + +[build-system] +requires = ["poetry-core>=1.0.0"] +build-backend = "poetry.core.masonry.api" + From 8a0e6d702489c552395b108bae8b0e71c777af0c Mon Sep 17 00:00:00 2001 From: Daniel Bernstein Date: Fri, 1 Sep 2023 16:57:40 -0700 Subject: [PATCH 02/30] Fix formatting on README. 
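
The heading used Textile syntax (h1.) rather than Markdown.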
--- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 36275df..31ee4be 100644 --- a/README.md +++ b/README.md @@ -1,3 +1,3 @@ -h1. Palace Quicksight Tools +# Palace Quicksight Tools A suite of command line operations for exporting and importing quicksight dashboards from and to AWS accounts. From 4c7d55d1b7296b9eabe83a5020b3884a28598427 Mon Sep 17 00:00:00 2001 From: Daniel Bernstein Date: Fri, 1 Sep 2023 16:59:43 -0700 Subject: [PATCH 03/30] Add link to palace-quicksight-resources project. --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 31ee4be..e5bcc56 100644 --- a/README.md +++ b/README.md @@ -1,3 +1,4 @@ # Palace Quicksight Tools A suite of command line operations for exporting and importing quicksight dashboards from and to AWS accounts. +Exported resources can be found [here](https://github.com/ThePalaceProject/palace-quicksight-resources). From 843001b2a383f3c623eedccdb7384b2744d55cb0 Mon Sep 17 00:00:00 2001 From: Daniel Bernstein Date: Fri, 1 Sep 2023 17:04:27 -0700 Subject: [PATCH 04/30] Fix docstring on import operation. --- core/operation/import_from_json_operation.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/operation/import_from_json_operation.py b/core/operation/import_from_json_operation.py index 042686f..3927d1a 100644 --- a/core/operation/import_from_json_operation.py +++ b/core/operation/import_from_json_operation.py @@ -6,7 +6,7 @@ class ImportFromJsonOperation(BaseOperation): """ - Exports a Quicksight Analysis and all it's dependencies to json files on disk + Imports a Quicksight template and all it's dependencies into Quicksight. """ def __init__(self, template_name: str, target_namespace: str, data_source_arn: str, input_dir: str, *args, From 18621e71056d337c7853d2e89e910e20e04794a4 Mon Sep 17 00:00:00 2001 From: Daniel Bernstein Date: Fri, 1 Sep 2023 17:07:44 -0700 Subject: [PATCH 05/30] Add instructions to readme for running the script. --- README.md | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/README.md b/README.md index e5bcc56..cfa9f94 100644 --- a/README.md +++ b/README.md @@ -2,3 +2,20 @@ A suite of command line operations for exporting and importing quicksight dashboards from and to AWS accounts. Exported resources can be found [here](https://github.com/ThePalaceProject/palace-quicksight-resources). + +## Usage + +``` +./bin/palace-quicksight --help +Usage: palace-quicksight [OPTIONS] COMMAND [ARGS]... + +Options: + --help Show this message and exit. + +Commands: + export-analysis Creates a template from the analysis... + import-from-json Import template and datasource files... + publish-dashboard-from-template + Create/Update a dashboard from a template + +``` From b74d794a05309d50660d0c0027794cbe622a9309 Mon Sep 17 00:00:00 2001 From: Daniel Bernstein Date: Fri, 1 Sep 2023 17:12:52 -0700 Subject: [PATCH 06/30] Simplify command names. --- README.md | 8 +++----- core/cli.py | 8 ++++---- 2 files changed, 7 insertions(+), 9 deletions(-) diff --git a/README.md b/README.md index cfa9f94..4b3ab09 100644 --- a/README.md +++ b/README.md @@ -13,9 +13,7 @@ Options: --help Show this message and exit. Commands: - export-analysis Creates a template from the analysis... - import-from-json Import template and datasource files... - publish-dashboard-from-template - Create/Update a dashboard from a template - + export-analysis Creates a template from the analysis and exports at... 
+ import-template Import template and datasource files from json + publish-dashboard Create/Update a dashboard from a template ``` diff --git a/core/cli.py b/core/cli.py index 2b2141c..49d6a6e 100644 --- a/core/cli.py +++ b/core/cli.py @@ -46,7 +46,7 @@ def export_analysis(aws_profile: str, aws_account_id: str, analysis_id: str, out help='The namespace you wish to target (e.g. tpp-prod, tpp-dev, tpp-staging).') @click.option('--input-dir', required=True, help='The path to the input directory from which resources will be imported') -def import_from_json(aws_profile: str, aws_account_id: str, template_name: str, data_source_arn: str, +def import_template(aws_profile: str, aws_account_id: str, template_name: str, data_source_arn: str, target_namespace: str, input_dir: str): """ Import template and datasource files from json @@ -64,7 +64,7 @@ def import_from_json(aws_profile: str, aws_account_id: str, template_name: str, input_dir=input_dir).execute() -cli.add_command(import_from_json) +cli.add_command(import_template) @click.command @@ -74,7 +74,7 @@ def import_from_json(aws_profile: str, aws_account_id: str, template_name: str, @click.option('--target-namespace', required=True, help='The namespace you wish to target (e.g. tpp-prod, tpp-dev, tpp-staging).') @click.option('--group-name', required=True, help='Name of the Quicksight User Group') -def publish_dashboard_from_template(aws_profile: str, aws_account_id: str, template_id: str, target_namespace: str, +def publish_dashboard(aws_profile: str, aws_account_id: str, template_id: str, target_namespace: str, group_name: str): """ Create/Update a dashboard from a template @@ -92,4 +92,4 @@ def publish_dashboard_from_template(aws_profile: str, aws_account_id: str, templ group_name=group_name).execute() -cli.add_command(publish_dashboard_from_template) +cli.add_command(publish_dashboard) From b4e863a98ef198098fc836a227fa3b00d7627281 Mon Sep 17 00:00:00 2001 From: Daniel Bernstein Date: Tue, 5 Sep 2023 20:57:36 -0700 Subject: [PATCH 07/30] Add a couple of tests, tox configuration, github ci configuration. 
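
The export operation tests run against a QuickSight client stubbed with
botocore's Stubber, so no AWS credentials are needed; tox drives pytest
across the supported Python versions.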
--- .github/actions/poetry/action.yml | 25 ++ .github/dependabot.yml | 15 ++ .github/pull_request_template.md | 22 ++ .github/release.yml | 23 ++ .github/workflows/lint.yml | 38 +++ .github/workflows/mypy.yml | 32 +++ .github/workflows/test-build.yml | 56 +++++ .gitignore | 1 + core/cli.py | 13 +- core/operation/baseoperation.py | 9 +- core/operation/export_analysis_operation.py | 14 +- pyproject.toml | 5 +- .../core/operation/analysis_test_responses.py | 230 ++++++++++++++++++ .../test_export_analysis_operation.py | 67 +++++ tests/core/test_cli.py | 10 + tox.ini | 25 ++ 16 files changed, 565 insertions(+), 20 deletions(-) create mode 100644 .github/actions/poetry/action.yml create mode 100644 .github/dependabot.yml create mode 100644 .github/pull_request_template.md create mode 100644 .github/release.yml create mode 100644 .github/workflows/lint.yml create mode 100644 .github/workflows/mypy.yml create mode 100644 .github/workflows/test-build.yml create mode 100644 tests/core/operation/analysis_test_responses.py create mode 100644 tests/core/operation/test_export_analysis_operation.py create mode 100644 tests/core/test_cli.py create mode 100644 tox.ini diff --git a/.github/actions/poetry/action.yml b/.github/actions/poetry/action.yml new file mode 100644 index 0000000..3e6a2db --- /dev/null +++ b/.github/actions/poetry/action.yml @@ -0,0 +1,25 @@ +name: 'Poetry' +description: 'Install python poetry' + +inputs: + version: + description: "Version of poetry to install" + required: false + default: "1.5.1" + +outputs: + version: + description: "Installed version" + value: ${{ steps.poetry-version.outputs.version }} + +runs: + using: "composite" + steps: + - run: | + curl -sSL https://install.python-poetry.org | python - --yes --version ${{ inputs.version }} + echo "$HOME/.local/bin" >> $GITHUB_PATH + shell: bash + + - id: poetry-version + run: echo "version=$(poetry --version)" >> $GITHUB_OUTPUT + shell: bash diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 0000000..14cc274 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,15 @@ +# Please see the documentation for all configuration options: +# https://help.github.com/github/administering-a-repository/configuration-options-for-dependency-updates + +version: 2 +updates: + - package-ecosystem: "pip" + directory: "/" + schedule: + interval: "daily" + versioning-strategy: increase-if-necessary + + - package-ecosystem: "github-actions" + directory: "/" + schedule: + interval: "daily" diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md new file mode 100644 index 0000000..02cecc3 --- /dev/null +++ b/.github/pull_request_template.md @@ -0,0 +1,22 @@ +## Description + + + +## Motivation and Context + + + + +## How Has This Been Tested? + + + + + +## Checklist + + + + +- [ ] I have updated the documentation accordingly. +- [ ] All new and existing tests passed. 
diff --git a/.github/release.yml b/.github/release.yml new file mode 100644 index 0000000..0b89c12 --- /dev/null +++ b/.github/release.yml @@ -0,0 +1,23 @@ +changelog: + exclude: + labels: + - ignore for notes + categories: + - title: Incompatible Changes + labels: + - incompatible changes + - title: Features + labels: + - feature + - title: Bugfixes + labels: + - bug + - title: Other Changes + labels: + - "*" + exclude: + labels: + - dependencies + - title: Dependency Updates + labels: + - dependencies diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml new file mode 100644 index 0000000..c06bb78 --- /dev/null +++ b/.github/workflows/lint.yml @@ -0,0 +1,38 @@ +name: Lint +on: [push, pull_request] +env: + PYTHON_VERSION: 3.9 + +jobs: + lint: + name: Lint + runs-on: ubuntu-latest + permissions: + contents: read + + steps: + - uses: actions/checkout@v3 + + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: ${{ env.PYTHON_VERSION }} + + - name: Install Poetry + uses: ./.github/actions/poetry + + - name: Install Pre-commit + run: | + poetry install --only ci + env: + POETRY_VIRTUALENVS_CREATE: false + + - name: Restore pre-commit cache + uses: actions/cache@v3 + with: + path: ~/.cache/pre-commit + key: pre-commit-${{ runner.os }}-py${{ env.PYTHON_VERSION }}-${{ hashFiles('.pre-commit-config.yaml') }} + restore-keys: pre-commit-${{ runner.os }}-py${{ env.PYTHON_VERSION }} + + - name: Lint + run: pre-commit run --all-files --show-diff-on-failure diff --git a/.github/workflows/mypy.yml b/.github/workflows/mypy.yml new file mode 100644 index 0000000..beca934 --- /dev/null +++ b/.github/workflows/mypy.yml @@ -0,0 +1,32 @@ +name: Mypy (Type check) +on: [push, pull_request] +env: + PYTHON_VERSION: 3.9 + +jobs: + mypy: + runs-on: ubuntu-latest + permissions: + contents: read + + steps: + - uses: actions/checkout@v3 + + - name: Set up Python 🐍 + uses: actions/setup-python@v4 + with: + python-version: ${{ env.PYTHON_VERSION }} + + - name: Install Poetry 🎸 + uses: ./.github/actions/poetry + + - name: Install OS Packages 🧰 + run: | + sudo apt-get update + sudo apt-get install --yes libxmlsec1-dev libxml2-dev + + - name: Install Python Packages 📦 + run: poetry install --without ci + + - name: Run MyPy 🪄 + run: poetry run mypy diff --git a/.github/workflows/test-build.yml b/.github/workflows/test-build.yml new file mode 100644 index 0000000..0f55818 --- /dev/null +++ b/.github/workflows/test-build.yml @@ -0,0 +1,56 @@ +name: Test & Build +on: [push, pull_request] + +concurrency: + group: test-build-${{ github.ref_name }}-${{ github.event_name }} + cancel-in-progress: true + +jobs: + test: + name: ${{ matrix.module }} Tests (Py ${{ matrix.python-version }}) + runs-on: ubuntu-latest + timeout-minutes: 60 + permissions: + contents: read + strategy: + fail-fast: false + matrix: + python-version: ["3.8", "3.9", "3.10", "3.11"] + module: [Core] + + # We want to run on external PRs, but not on our own internal PRs as they'll be run + # by the push to the branch. This prevents duplicated runs on internal PRs. 
# Some discussion of this here:
+      # https://github.community/t/duplicate-checks-on-push-and-pull-request-simultaneous-event/18012
+    if: github.event_name == 'push' || github.event.pull_request.head.repo.full_name != github.repository
+
+    steps:
+      - uses: actions/checkout@v3
+
+      # See comment here: https://github.com/actions/runner-images/issues/1187#issuecomment-686735760
+      - name: Disable network offload
+        run: sudo ethtool -K eth0 tx off rx off
+
+      - name: Set up Python
+        uses: actions/setup-python@v4
+        with:
+          python-version: ${{ matrix.python-version }}
+
+      - name: Install Apt Packages
+        run: |
+          sudo apt-get update
+
+      - name: Install Poetry
+        uses: ./.github/actions/poetry
+
+      - name: Install Tox
+        run: |
+          poetry install --only ci
+        env:
+          POETRY_VIRTUALENVS_CREATE: false
+
+      - name: Run Tests
+        run: tox
+        env:
+          MODULE: ${{ matrix.module }}
+
diff --git a/.gitignore b/.gitignore
index 083d732..7efed60 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,2 +1,3 @@
 __pycache__
 .idea
+.tox
diff --git a/core/cli.py b/core/cli.py
index 49d6a6e..885dbd1 100644
--- a/core/cli.py
+++ b/core/cli.py
@@ -1,3 +1,4 @@
+import boto3
 import click
 
 import logging
@@ -9,6 +10,10 @@
 log = logging.getLogger("core.cli")
 
+def create_quicksight_client(aws_profile: str):
+    boto3.setup_default_session(profile_name=aws_profile)
+    return boto3.client('quicksight')
+
 @click.group()
 def cli():
     pass
@@ -22,14 +27,14 @@ def cli():
 def export_analysis(aws_profile: str, aws_account_id: str, analysis_id: str, output_dir: str):
     """
-    Creates a template from the analysis and exports at and the dataset(s) to json.
+    Exports a template and dependent data sets based on the specified analysis to JSON files.
     """
     click.echo(f"export_analysis")
     click.echo(f"aws_profile = {aws_profile}")
     click.echo(f"analysis_id = {analysis_id}")
     click.echo(f"aws_account_id = {aws_account_id}")
     click.echo(f"output_dir = {output_dir}")
-    ExportAnalysisOperation(aws_profile=aws_profile, aws_account_id=aws_account_id, analysis_id=analysis_id,
+    ExportAnalysisOperation(qs_client=create_quicksight_client(aws_profile=aws_profile), aws_account_id=aws_account_id, analysis_id=analysis_id,
                             output_dir=output_dir).execute()
 
@@ -59,7 +64,7 @@ def import_template(aws_profile: str, aws_account_id: str, template_name: str, d
     click.echo(f"data_source_arn = {data_source_arn}")
     click.echo(f"input_dir = {input_dir}")
 
-    ImportFromJsonOperation(aws_profile=aws_profile, aws_account_id=aws_account_id, template_name=template_name,
+    ImportFromJsonOperation(qs_client=create_quicksight_client(aws_profile), aws_account_id=aws_account_id, template_name=template_name,
                             target_namespace=target_namespace, data_source_arn=data_source_arn,
                             input_dir=input_dir).execute()
 
@@ -85,7 +90,7 @@ def publish_dashboard(aws_profile: str, aws_account_id: str, template_id: str, t
     click.echo(f"aws_account_id = {aws_account_id}")
     click.echo(f"template_id = {template_id}")
     click.echo(f"group_name = {group_name}")
-    PublishDashboardFromTemplateOperation(aws_profile=aws_profile,
+    PublishDashboardFromTemplateOperation(qs_client=create_quicksight_client(aws_profile),
                                           aws_account_id=aws_account_id,
                                           template_id=template_id,
                                           target_namespace=target_namespace,
diff --git a/core/operation/baseoperation.py b/core/operation/baseoperation.py
index 3a2eb80..e1c628a 100644
--- a/core/operation/baseoperation.py
+++ b/core/operation/baseoperation.py
@@ -17,16 +17,11 @@ class BaseOperation:
 
     A base class for AWS based operations.
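+    Subclasses are constructed with a QuickSight client and implement execute().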
""" - def __init__(self, aws_profile: str, aws_account_id: str): - self._aws_profile = aws_profile + def __init__(self, qs_client: object, aws_account_id: str): self._aws_account_id = aws_account_id - self._qs_client = self._create_client() + self._qs_client = qs_client self._log = logging.getLogger(self.__class__.__name__) - def _create_client(self): - boto3.setup_default_session(profile_name=self._aws_profile) - return boto3.client('quicksight') - @abstractmethod def execute(self): pass diff --git a/core/operation/export_analysis_operation.py b/core/operation/export_analysis_operation.py index 961f83b..13e342f 100644 --- a/core/operation/export_analysis_operation.py +++ b/core/operation/export_analysis_operation.py @@ -18,8 +18,8 @@ def __init__(self, analysis_id: str, output_dir: str, *args, **kwargs): def execute(self): - os.makedirs(self._resolve_path(self._output_dir, TEMPLATE_DIR)) - os.makedirs(self._resolve_path(self._output_dir, DATA_SET_DIR)) + os.makedirs(self._resolve_path(self._output_dir, TEMPLATE_DIR), exist_ok=True) + os.makedirs(self._resolve_path(self._output_dir, DATA_SET_DIR), exist_ok=True) # retrieve description analysis_description = self._qs_client.describe_analysis(AwsAccountId=self._aws_account_id, @@ -52,23 +52,23 @@ def execute(self): data_set_references=data_set_references) def verify_success() -> bool: - template_definition = self._get_template_definition(template_id=template_id) - return "SUCCESSFUL" in template_definition["ResourceStatus"] + self._template_definition = self._get_template_definition(template_id=template_id) + + return "SUCCESSFUL" in self._template_definition["ResourceStatus"] retry(verify_success) # get the newly created template definition - template_definition = self._get_template_definition(template_id=template_id) self._log.info(f"Writing template definition response to disk") files_to_update = [] map_to_save = {} # retain only the fields we will need to restore the state. 
for i in ["Name", "Definition", "TemplateId"]: - map_to_save[i] = template_definition[i] + map_to_save[i] = self._template_definition[i] # save the template as json file definition_json_str = json.dumps(map_to_save, indent=4) - template_file = self._resolve_path(self._output_dir, TEMPLATE_DIR, template_definition["Name"] + ".json") + template_file = self._resolve_path(self._output_dir, TEMPLATE_DIR, self._template_definition["Name"] + ".json") with open(template_file, "w") as template_file: template_file.write(definition_json_str) diff --git a/pyproject.toml b/pyproject.toml index 7f421f2..fa2e666 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ version = "0.1.0" description = "" authors = ["Daniel Bernstein "] readme = "README.md" -packages = [{include = "main.py"}] +packages = [{include = "core"}] repository = "https://github.com/dbernstein/palace-quicksight" [tool.poetry.dependencies] @@ -12,7 +12,8 @@ python = "^3.10" boto3 = "^1.18" botocore = "^1.21" click = "^8.1.3" -ghapi = "^1.0.4" +pytest= ">=7.2.0" + [build-system] requires = ["poetry-core>=1.0.0"] diff --git a/tests/core/operation/analysis_test_responses.py b/tests/core/operation/analysis_test_responses.py new file mode 100644 index 0000000..bd64946 --- /dev/null +++ b/tests/core/operation/analysis_test_responses.py @@ -0,0 +1,230 @@ +from datetime import datetime + +from dateutil.tz import tzlocal + + +def get_analysis_description_response(analysis_id): + return { + 'ResponseMetadata': {'RequestId': 'aca2b75d-503b-48c4-b66c-273eafe97a33', + 'HTTPStatusCode': 200, + 'HTTPHeaders': {'date': 'Tue, 05 Sep 2023 19:29:21 GMT', + 'content-type': 'application/json', + 'content-length': '1495', + 'connection': 'keep-alive', + 'x-amzn-requestid': 'aca2b75d-503b-48c4-b66c-273eafe97a33'}, + 'RetryAttempts': 0}, 'Status': 200, + 'Analysis': {'AnalysisId': analysis_id, + 'Arn': 'arn', + 'Name': 'library', 'Status': 'CREATION_SUCCESSFUL', 'DataSetArns': [ + 'arn:aws:quicksight:us-west-2:128682227026:dataset/e9e15c78-0193-4e4c-9a49-ed005569297d', + 'arn:aws:quicksight:us-west-2:128682227026:dataset/86eb4ca5-9552-4ba6-8b1b-7ef1b9b40f78', ], + 'ThemeArn': 'theme-arn', + 'CreatedTime': datetime(2023, 9, 1, 10, 6, 19, 376000, tzinfo=tzlocal()), + 'LastUpdatedTime': datetime(2023, 9, 1, 10, 6, 19, 376000, tzinfo=tzlocal()), + 'Sheets': [ + {'SheetId': '9f2df4a8-21e2-4aa4-adcd-3fb32e86c4ba', + 'Name': 'Circulation Summary'}, + {'SheetId': '341952d3-ece8-4a4b-924c-2d16c905e486', + 'Name': 'Circulation over Time'}, + {'SheetId': '8e9ca074-e043-4e02-be63-649e1ded32e6', + 'Name': 'Loaned Title Use'}]}, + 'RequestId': 'aca2b75d-503b-48c4-b66c-273eafe97a33'} + + +def get_analysis_definition_response(): + return { + 'ResponseMetadata': {'RequestId': 'dac531e6-fc4f-41a5-986e-4212c79e9c6c', 'HTTPStatusCode': 200, + 'HTTPHeaders': {'date': 'Tue, 05 Sep 2023 19:47:04 GMT', + 'content-type': 'application/json', 'content-length': '172259', + 'connection': 'keep-alive', + 'x-amzn-requestid': 'dac531e6-fc4f-41a5-986e-4212c79e9c6c'}, + 'RetryAttempts': 0}, 'Status': 200, + 'AnalysisId': 'd954330e-7b80-4a4a-ab64-47f53d8eea38', 'Name': 'library', + 'ResourceStatus': 'CREATION_SUCCESSFUL', + 'ThemeArn': 'theme-arn', + 'Definition': {'DataSetIdentifierDeclarations': [{'Identifier': 'circulation_view', + 'DataSetArn': 'arn:aws:quicksight:us-west-2:128682227026:dataset/e9e15c78-0193-4e4c-9a49-ed005569297d'}, + {'Identifier': 'patron_events', + 'DataSetArn': 'arn:aws:quicksight:us-west-2:128682227026:dataset/86eb4ca5-9552-4ba6-8b1b-7ef1b9b40f78'}], + 
'Sheets': [], + 'ColumnConfigurations': [], + 'AnalysisDefaults': {'DefaultNewSheetConfiguration': { + 'InteractiveLayoutConfiguration': {'Grid': {'CanvasSizeOptions': { + 'ScreenCanvasSizeOptions': {'ResizeOption': 'FIXED', + 'OptimizedViewPortWidth': '1600px'}}}}, + 'SheetContentType': 'INTERACTIVE'}}}, 'RequestId': 'dac531e6-fc4f-41a5-986e-4212c79e9c6c'} + + +def create_template_response(): + return {'ResponseMetadata': {'RequestId': '55aba51c-0052-4311-8fb3-b9433f0041da', 'HTTPStatusCode': 202, + 'HTTPHeaders': {'date': 'Tue, 05 Sep 2023 21:52:26 GMT', + 'content-type': 'application/json', 'content-length': '293', + 'connection': 'keep-alive', + 'x-amzn-requestid': '55aba51c-0052-4311-8fb3-b9433f0041da'}, + 'RetryAttempts': 0}, 'Status': 202, 'TemplateId': 'library-template', + 'Arn': 'arn:aws:quicksight:us-west-2:128682227026:template/library-template', + 'VersionArn': 'arn:aws:quicksight:us-west-2:128682227026:template/library-template/version/9', + 'CreationStatus': 'CREATION_IN_PROGRESS', 'RequestId': '55aba51c-0052-4311-8fb3-b9433f0041da'} + + +def create_template_parameters(aws_account_id: str): + {'AwsAccountId': aws_account_id, 'TemplateId': 'library-template', 'Name': 'library', 'SourceEntity': { + 'SourceAnalysis': { + 'Arn': 'arn:aws:quicksight:us-west-2:128682227026:analysis/d954330e-7b80-4a4a-ab64-47f53d8eea38', + 'DataSetReferences': [{'DataSetPlaceholder': 'circulation_view', + 'DataSetArn': 'arn:aws:quicksight:us-west-2:128682227026:dataset/e9e15c78-0193-4e4c-9a49-ed005569297d'}, + {'DataSetPlaceholder': 'patron_events', + 'DataSetArn': 'ds_2_arn'}]}}} + + +def describe_template_definition_response(): + return {'ResponseMetadata': {'RequestId': 'c9ebc247-e7f7-4140-913c-2a1288ff43f6', 'HTTPStatusCode': 200, + 'HTTPHeaders': {'date': 'Tue, 05 Sep 2023 21:59:51 GMT', + 'content-type': 'application/json', 'content-length': '173310', + 'connection': 'keep-alive', + 'x-amzn-requestid': 'c9ebc247-e7f7-4140-913c-2a1288ff43f6'}, + 'RetryAttempts': 0}, 'Status': 200, 'Name': 'library', + 'TemplateId': 'library-template', + 'ResourceStatus': 'CREATION_SUCCESSFUL', + 'ThemeArn': 'arn:aws:quicksight:us-west-2:128682227026:theme/5f5e7417-a800-4812-9e59-dc44e0580412', + 'Definition': {'DataSetConfigurations': [{'Placeholder': 'circulation_view', 'DataSetSchema': { + 'ColumnSchemaList': [{'Name': 'fiction', 'DataType': 'INTEGER'}, + {'Name': 'location', 'DataType': 'STRING'}, + {'Name': 'event_type', 'DataType': 'STRING'}, + {'Name': 'audience', 'DataType': 'STRING'}, + {'Name': 'medium', 'DataType': 'STRING'}, + {'Name': 'time_stamp', 'DataType': 'DATETIME'}, + {'Name': 'distributor', 'DataType': 'STRING'}, + {'Name': 'author', 'DataType': 'STRING'}, + {'Name': 'libary_short_name', 'DataType': 'STRING'}, + {'Name': 'title', 'DataType': 'STRING'}, + {'Name': 'open_access', 'DataType': 'INTEGER'}, + {'Name': 'collection_name', 'DataType': 'STRING'}, + {'Name': 'genre', 'DataType': 'STRING'}, + {'Name': 'library_name', 'DataType': 'STRING'}]}, 'ColumnGroupSchemaList': []}, + {'Placeholder': 'patron_events', 'DataSetSchema': { + 'ColumnSchemaList': [ + {'Name': 'location', 'DataType': 'STRING'}, + {'Name': 'event_type', 'DataType': 'STRING'}, + {'Name': 'time_stamp', + 'DataType': 'DATETIME'}, + {'Name': 'library_name', + 'DataType': 'STRING'}]}, + 'ColumnGroupSchemaList': []}], }, + } + + +def describe_data_set_1_response(): + return {'ResponseMetadata': {'RequestId': '5c7285af-6ea4-4192-af69-8f8093785112', 'HTTPStatusCode': 200, + 'HTTPHeaders': {'date': 'Wed, 06 Sep 2023 00:02:50 GMT', 
+ 'content-type': 'application/json', 'content-length': '4642', + 'connection': 'keep-alive', + 'x-amzn-requestid': '5c7285af-6ea4-4192-af69-8f8093785112'}, + 'RetryAttempts': 0}, 'Status': 200, + 'DataSet': {'Arn': 'arn:aws:quicksight:us-west-2:128682227026:dataset/e9e15c78-0193-4e4c-9a49-ed005569297d', + 'DataSetId': 'e9e15c78-0193-4e4c-9a49-ed005569297d', 'Name': 'circulation_events_view', + 'CreatedTime': datetime(2023, 2, 28, 13, 39, 33, 923000, tzinfo=tzlocal()), + 'LastUpdatedTime': datetime(2023, 6, 7, 10, 5, 32, 640000, tzinfo=tzlocal()), + 'PhysicalTableMap': {'25046cd8-e08f-41e0-8af8-5259b64499fd': {'CustomSql': { + 'DataSourceArn': 'arn:aws:quicksight:us-west-2:128682227026:datasource/a4e44abb-c1fd-4b5a-be3f-daca72d50e0a', + 'Name': 'circulation_events_view', + 'SqlQuery': 'select \n ce.time_stamp, \n l.short_name as libary_short_name, \n l.name as library_name,\n l.location as location,\n et.name as event_type, \n i.identifier,\n it.name as identifier_type,\n c.name as collection_name, \n ce.title, \n ce.author,\n ce.audience,\n ce.publisher,\n ce.language,\n ce.genre,\n ce.open_access,\n ce.fiction,\n ce.distributor,\n ce.medium\nfrom \n circulation_events ce,\n libraries l,\n collections c,\n circulation_event_types et,\n identifiers i,\n identifier_types it\nwhere \n ce.library_id = l.id and\n ce.event_type_id = et.id and\n ce.collection_id = c.id and\n ce.identifier_id = i.id and \n i.identifier_type_id = it.id', + 'Columns': [{'Name': 'time_stamp', 'Type': 'DATETIME'}, + {'Name': 'libary_short_name', 'Type': 'STRING'}, + {'Name': 'library_name', 'Type': 'STRING'}, + {'Name': 'location', 'Type': 'STRING'}, + {'Name': 'event_type', 'Type': 'STRING'}, + {'Name': 'identifier', 'Type': 'STRING'}, + {'Name': 'identifier_type', 'Type': 'STRING'}, + {'Name': 'collection_name', 'Type': 'STRING'}, + {'Name': 'title', 'Type': 'STRING'}, {'Name': 'author', 'Type': 'STRING'}, + {'Name': 'audience', 'Type': 'STRING'}, {'Name': 'publisher', 'Type': 'STRING'}, + {'Name': 'language', 'Type': 'STRING'}, {'Name': 'genre', 'Type': 'STRING'}, + {'Name': 'open_access', 'Type': 'BIT'}, {'Name': 'fiction', 'Type': 'BIT'}, + {'Name': 'distributor', 'Type': 'STRING'}, + {'Name': 'medium', 'Type': 'STRING'}]}}}, 'LogicalTableMap': { + '6c80275e-d03d-417c-a8cd-57d93e58129b': {'Alias': 'circulation_events_view', 'DataTransforms': [{ + 'ProjectOperation': { + 'ProjectedColumns': [ + 'time_stamp', + 'libary_short_name', + 'library_name', + 'location', + 'event_type', + 'identifier', + 'identifier_type', + 'collection_name', + 'title', + 'author', + 'audience', + 'publisher', + 'language', + 'genre', + 'open_access', + 'fiction', + 'distributor', + 'medium']}}], + 'Source': { + 'PhysicalTableId': '25046cd8-e08f-41e0-8af8-5259b64499fd'}}}, + 'OutputColumns': [{'Name': 'time_stamp', 'Type': 'DATETIME'}, + {'Name': 'libary_short_name', 'Type': 'STRING'}, + {'Name': 'library_name', 'Type': 'STRING'}, + {'Name': 'location', 'Type': 'STRING'}, + {'Name': 'event_type', 'Type': 'STRING'}, + {'Name': 'identifier', 'Type': 'STRING'}, + {'Name': 'identifier_type', 'Type': 'STRING'}, + {'Name': 'collection_name', 'Type': 'STRING'}, + {'Name': 'title', 'Type': 'STRING'}, {'Name': 'author', 'Type': 'STRING'}, + {'Name': 'audience', 'Type': 'STRING'}, + {'Name': 'publisher', 'Type': 'STRING'}, + {'Name': 'language', 'Type': 'STRING'}, {'Name': 'genre', 'Type': 'STRING'}, + {'Name': 'open_access', 'Type': 'INTEGER'}, + {'Name': 'fiction', 'Type': 'INTEGER'}, + {'Name': 'distributor', 'Type': 'STRING'}, + {'Name': 
'medium', 'Type': 'STRING'}], 'ImportMode': 'DIRECT_QUERY', + 'ConsumedSpiceCapacityInBytes': 0, 'FieldFolders': {}, + 'DataSetUsageConfiguration': {'DisableUseAsDirectQuerySource': False, + 'DisableUseAsImportedSource': False}}, + 'RequestId': '5c7285af-6ea4-4192-af69-8f8093785112'} + + +def describe_data_set_2_response(): + return {'ResponseMetadata': {'RequestId': '3e6ad967-c44d-4a86-8391-be51ebf978c5', 'HTTPStatusCode': 200, + 'HTTPHeaders': {'date': 'Wed, 06 Sep 2023 00:07:58 GMT', + 'content-type': 'application/json', 'content-length': '2564', + 'connection': 'keep-alive', + 'x-amzn-requestid': '3e6ad967-c44d-4a86-8391-be51ebf978c5'}, + 'RetryAttempts': 0}, 'Status': 200, + 'DataSet': {'Arn': 'arn:aws:quicksight:us-west-2:128682227026:dataset/86eb4ca5-9552-4ba6-8b1b-7ef1b9b40f78', + 'DataSetId': '86eb4ca5-9552-4ba6-8b1b-7ef1b9b40f78', 'Name': 'patron_events', + 'CreatedTime': datetime(2023, 2, 28, 16, 8, 15, 620000, tzinfo=tzlocal()), + 'LastUpdatedTime': datetime(2023, 3, 1, 8, 28, 1, 477000, tzinfo=tzlocal()), + 'PhysicalTableMap': {'50873ea6-0c3a-4989-97e1-eb740e8a3348': {'CustomSql': { + 'DataSourceArn': 'arn:aws:quicksight:us-west-2:128682227026:datasource/a4e44abb-c1fd-4b5a-be3f-daca72d50e0a', + 'Name': 'patron_events', + 'SqlQuery': 'select \n pe.time_stamp, \n l.short_name as library_short_name, \n l.name as library_name, \n l.location, \n l.state, \n ev.name as event_type \nfrom \n patron_events pe, \n libraries l, \n circulation_event_types ev \nwhere \n pe.library_id = l.id and \n pe.event_type_id = ev.id', + 'Columns': [{'Name': 'time_stamp', 'Type': 'DATETIME'}, + {'Name': 'library_short_name', 'Type': 'STRING'}, + {'Name': 'library_name', 'Type': 'STRING'}, + {'Name': 'location', 'Type': 'STRING'}, {'Name': 'state', 'Type': 'STRING'}, + {'Name': 'event_type', 'Type': 'STRING'}]}}}, 'LogicalTableMap': { + '4dc4e51c-76b2-4595-8b3b-1759f76a05c4': {'Alias': 'patron_events', 'DataTransforms': [{ + 'ProjectOperation': { + 'ProjectedColumns': [ + 'time_stamp', + 'library_short_name', + 'library_name', + 'location', + 'state', + 'event_type']}}], + 'Source': { + 'PhysicalTableId': '50873ea6-0c3a-4989-97e1-eb740e8a3348'}}}, + 'OutputColumns': [{'Name': 'time_stamp', 'Type': 'DATETIME'}, + {'Name': 'library_short_name', 'Type': 'STRING'}, + {'Name': 'library_name', 'Type': 'STRING'}, + {'Name': 'location', 'Type': 'STRING'}, {'Name': 'state', 'Type': 'STRING'}, + {'Name': 'event_type', 'Type': 'STRING'}], 'ImportMode': 'DIRECT_QUERY', + 'ConsumedSpiceCapacityInBytes': 0, 'FieldFolders': {}, + 'DataSetUsageConfiguration': {'DisableUseAsDirectQuerySource': False, + 'DisableUseAsImportedSource': False}}, + 'RequestId': '3e6ad967-c44d-4a86-8391-be51ebf978c5'} diff --git a/tests/core/operation/test_export_analysis_operation.py b/tests/core/operation/test_export_analysis_operation.py new file mode 100644 index 0000000..80c142d --- /dev/null +++ b/tests/core/operation/test_export_analysis_operation.py @@ -0,0 +1,67 @@ +import os +from datetime import datetime + +import botocore +from botocore.stub import Stubber +from dateutil.tz import tzlocal + +from core.operation.export_analysis_operation import ExportAnalysisOperation +from tests.core.operation.analysis_test_responses import get_analysis_description_response, \ + get_analysis_definition_response, create_template_response, describe_template_definition_response, \ + describe_data_set_2_response, describe_data_set_1_response + + +class TestExportAnalysisOperation: + + def test(self): + analysis_id = "my-quicksight-analysis-id" + 
output_dir = "/tmp/test-output" + account = "012345678910" + + qs_client = botocore.session.get_session().create_client('quicksight') + with Stubber(qs_client) as stub: + analysis_description_params = {'AwsAccountId': account, 'AnalysisId': analysis_id} + + stub.add_response('describe_analysis', service_response=get_analysis_description_response(analysis_id), + expected_params=analysis_description_params) + + stub.add_response('describe_analysis_definition', service_response=get_analysis_definition_response(), + expected_params=analysis_description_params) + + create_template_params = {'AwsAccountId': account, 'TemplateId': 'library-template', 'Name': 'library', + 'SourceEntity': { + 'SourceAnalysis': { + 'Arn': 'arn', + 'DataSetReferences': [{'DataSetPlaceholder': 'circulation_view', + 'DataSetArn': 'arn:aws:quicksight:us-west-2:128682227026:dataset/e9e15c78-0193-4e4c-9a49-ed005569297d'}, + {'DataSetPlaceholder': 'patron_events', + 'DataSetArn': 'arn:aws:quicksight:us-west-2:128682227026:dataset/86eb4ca5-9552-4ba6-8b1b-7ef1b9b40f78'}]}}} + + stub.add_response('create_template', service_response=create_template_response(), + expected_params=create_template_params) + + stub.add_response('describe_template_definition', service_response=describe_template_definition_response(), + expected_params={ + 'AwsAccountId': account, 'TemplateId': 'library-template', 'AliasName': "$LATEST", }) + + stub.add_response('describe_data_set', service_response=describe_data_set_1_response(), expected_params={ + 'AwsAccountId': account, 'DataSetId': 'e9e15c78-0193-4e4c-9a49-ed005569297d', + }) + + stub.add_response('describe_data_set', service_response=describe_data_set_2_response(), expected_params={ + 'AwsAccountId': account, 'DataSetId': '86eb4ca5-9552-4ba6-8b1b-7ef1b9b40f78', + }) + + op = ExportAnalysisOperation(qs_client=qs_client, analysis_id=analysis_id, output_dir=output_dir, + aws_account_id=account) + + op.execute() + + assets_dir = os.path.join(output_dir, "assets") + data_sets_dir = os.path.join(assets_dir, "data-sets") + templates_dir = os.path.join(assets_dir, "templates") + template_file = os.path.join(templates_dir, "library.json") + patron_events_file = os.path.join(data_sets_dir, "patron_events.json") + circulation_events_file = os.path.join(data_sets_dir, "circulation_view.json") + for p in [assets_dir, data_sets_dir, templates_dir, template_file, patron_events_file, circulation_events_file]: + assert os.path.exists(p) diff --git a/tests/core/test_cli.py b/tests/core/test_cli.py new file mode 100644 index 0000000..94a2e6c --- /dev/null +++ b/tests/core/test_cli.py @@ -0,0 +1,10 @@ +from click.testing import CliRunner + +from core.cli import export_analysis + +class TestCli: + def test_export_analysis_help(self): + runner = CliRunner() + result = runner.invoke(export_analysis, ['--help']) + assert result.exit_code == 0 + assert "Exports a template" in result.output diff --git a/tox.ini b/tox.ini new file mode 100644 index 0000000..9f316b1 --- /dev/null +++ b/tox.ini @@ -0,0 +1,25 @@ +[tox] +envlist = py{38,39,310,311}-core +skipsdist = true + +[testenv] +commands_pre = + poetry install -v +commands = + core: pytest {posargs:tests/core} +allowlist_externals = + python + poetry + pytest + + +[gh-actions] +python = + 3.8: py38 + 3.9: py39 + 3.10: py310 + 3.11: py311 + +[gh-actions:env] +MODULE = + Core: core From 806b96ddae2c6b2e0c40a1a17fbdbcd50561a0cd Mon Sep 17 00:00:00 2001 From: Daniel Bernstein Date: Wed, 6 Sep 2023 09:08:50 -0700 Subject: [PATCH 08/30] Fix github workflow file errors 
(remove references to non-existent ci group) --- .github/workflows/lint.yml | 2 +- .github/workflows/mypy.yml | 2 +- .github/workflows/test-build.yml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index c06bb78..ce97af7 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -23,7 +23,7 @@ jobs: - name: Install Pre-commit run: | - poetry install --only ci + poetry install env: POETRY_VIRTUALENVS_CREATE: false diff --git a/.github/workflows/mypy.yml b/.github/workflows/mypy.yml index beca934..054014a 100644 --- a/.github/workflows/mypy.yml +++ b/.github/workflows/mypy.yml @@ -26,7 +26,7 @@ jobs: sudo apt-get install --yes libxmlsec1-dev libxml2-dev - name: Install Python Packages 📦 - run: poetry install --without ci + run: poetry install - name: Run MyPy 🪄 run: poetry run mypy diff --git a/.github/workflows/test-build.yml b/.github/workflows/test-build.yml index 0f55818..a16a761 100644 --- a/.github/workflows/test-build.yml +++ b/.github/workflows/test-build.yml @@ -45,7 +45,7 @@ jobs: - name: Install Tox run: | - poetry install --only ci + poetry install env: POETRY_VIRTUALENVS_CREATE: false From 365973f66926f4a4b86b1810dced545c374b90a5 Mon Sep 17 00:00:00 2001 From: Daniel Bernstein Date: Wed, 6 Sep 2023 09:11:44 -0700 Subject: [PATCH 09/30] Update poetry lock file. --- poetry.lock | 109 ++++++++++++++++++++++++++++++++-------------------- 1 file changed, 67 insertions(+), 42 deletions(-) diff --git a/poetry.lock b/poetry.lock index 71c0492..26feec1 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2,17 +2,17 @@ [[package]] name = "boto3" -version = "1.28.9" +version = "1.28.41" description = "The AWS SDK for Python" optional = false python-versions = ">= 3.7" files = [ - {file = "boto3-1.28.9-py3-none-any.whl", hash = "sha256:01f078047eb4d238c6b9c6cc623f2af33b4ae67980c5326691e35cb5493ff6c7"}, - {file = "boto3-1.28.9.tar.gz", hash = "sha256:4cc0c6005be910e52077227e670930ab55a41ba86cdb6d1c052571d08cd4d32c"}, + {file = "boto3-1.28.41-py3-none-any.whl", hash = "sha256:47215c08645d1ea2568f9f358f25ffd4a42e08bce9af2fa1dd70c93c682dc880"}, + {file = "boto3-1.28.41.tar.gz", hash = "sha256:2f655ab7e577c7543f9ee4e42d98641ccf02230d2f33695a6b39617b765401f5"}, ] [package.dependencies] -botocore = ">=1.31.9,<1.32.0" +botocore = ">=1.31.41,<1.32.0" jmespath = ">=0.7.1,<2.0.0" s3transfer = ">=0.6.0,<0.7.0" @@ -21,13 +21,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.31.9" +version = "1.31.41" description = "Low-level, data-driven core of boto 3." 
optional = false python-versions = ">= 3.7" files = [ - {file = "botocore-1.31.9-py3-none-any.whl", hash = "sha256:e56ccd3536a90094ea5b176b5dd33bfe4f049efdf71af468ea1661bd424c787d"}, - {file = "botocore-1.31.9.tar.gz", hash = "sha256:bd849d3ac95f1781385ed831d753a04a3ec870a59d6598175aaedd71dc2baf5f"}, + {file = "botocore-1.31.41-py3-none-any.whl", hash = "sha256:c33a453328d361c089e6a8f9dd671a7b939288e539e6550c7030152e8162e906"}, + {file = "botocore-1.31.41.tar.gz", hash = "sha256:4dad7c5a5e70940de54ebf8de3955450c1f092f43cacff8103819d1e7d5374fa"}, ] [package.dependencies] @@ -40,13 +40,13 @@ crt = ["awscrt (==0.16.26)"] [[package]] name = "click" -version = "8.1.6" +version = "8.1.7" description = "Composable command line interface toolkit" optional = false python-versions = ">=3.7" files = [ - {file = "click-8.1.6-py3-none-any.whl", hash = "sha256:fa244bb30b3b5ee2cae3da8f55c9e5e0c0e86093306301fb418eb9dc40fbded5"}, - {file = "click-8.1.6.tar.gz", hash = "sha256:48ee849951919527a045bfe3bf7baa8a959c423134e1a5b98c05c20ba75a1cbd"}, + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, ] [package.dependencies] @@ -64,42 +64,30 @@ files = [ ] [[package]] -name = "fastcore" -version = "1.5.29" -description = "Python supercharged for fastai development" +name = "exceptiongroup" +version = "1.1.3" +description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" files = [ - {file = "fastcore-1.5.29-py3-none-any.whl", hash = "sha256:a7d7e89faf968f2d8584df2deca344c3974f6cf476e1299cd3c067d8fa7440e9"}, - {file = "fastcore-1.5.29.tar.gz", hash = "sha256:f1a2eb04eb7933f3f9eb4064852817df44dc96e20fab5658c14c035815269a3f"}, + {file = "exceptiongroup-1.1.3-py3-none-any.whl", hash = "sha256:343280667a4585d195ca1cf9cef84a4e178c4b6cf2274caef9859782b567d5e3"}, + {file = "exceptiongroup-1.1.3.tar.gz", hash = "sha256:097acd85d473d75af5bb98e41b61ff7fe35efe6675e4f9370ec6ec5126d160e9"}, ] -[package.dependencies] -packaging = "*" -pip = "*" - [package.extras] -dev = ["jupyterlab", "matplotlib", "nbdev (>=0.2.39)", "numpy", "pandas", "pillow", "torch"] +test = ["pytest (>=6)"] [[package]] -name = "ghapi" -version = "1.0.4" -description = "A python client for the GitHub API" +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" optional = false python-versions = ">=3.7" files = [ - {file = "ghapi-1.0.4-py3-none-any.whl", hash = "sha256:cb5c7008a89c270157adbaf5b2fd6951e9d9fc76131b9bec16118a558a6a4c04"}, - {file = "ghapi-1.0.4.tar.gz", hash = "sha256:9e7632c762d6f9c288e3b046b2d58c2f7992dda7c925683df435440912b10625"}, + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, ] -[package.dependencies] -fastcore = ">=1.5.4" -packaging = "*" -pip = "*" - -[package.extras] -dev = ["jsonref", "matplotlib"] - [[package]] name = "jmespath" version = "1.0.1" @@ -123,16 +111,42 @@ files = [ ] [[package]] -name = "pip" -version = "23.2" -description = "The PyPA recommended tool for installing Python packages." 
+name = "pluggy" +version = "1.3.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"}, + {file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "pytest" +version = "7.4.1" +description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.7" files = [ - {file = "pip-23.2-py3-none-any.whl", hash = "sha256:78e5353a9dda374b462f2054f83a7b63f3f065c98236a68361845c1b0ee7e35f"}, - {file = "pip-23.2.tar.gz", hash = "sha256:a160a170f3331d9ca1a0247eb1cd79c758879f1f81158f9cd05bbb5df80bea5c"}, + {file = "pytest-7.4.1-py3-none-any.whl", hash = "sha256:460c9a59b14e27c602eb5ece2e47bec99dc5fc5f6513cf924a7d03a578991b1f"}, + {file = "pytest-7.4.1.tar.gz", hash = "sha256:2f2301e797521b23e4d2585a0a3d7b5e50fdddaaf7e7d6773ea26ddb17c213ab"}, ] +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} + +[package.extras] +testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + [[package]] name = "python-dateutil" version = "2.8.2" @@ -149,13 +163,13 @@ six = ">=1.5" [[package]] name = "s3transfer" -version = "0.6.1" +version = "0.6.2" description = "An Amazon S3 Transfer Manager" optional = false python-versions = ">= 3.7" files = [ - {file = "s3transfer-0.6.1-py3-none-any.whl", hash = "sha256:3c0da2d074bf35d6870ef157158641178a4204a6e689e82546083e31e0311346"}, - {file = "s3transfer-0.6.1.tar.gz", hash = "sha256:640bb492711f4c0c0905e1f62b6aaeb771881935ad27884852411f8e9cacbca9"}, + {file = "s3transfer-0.6.2-py3-none-any.whl", hash = "sha256:b014be3a8a2aab98cfe1abc7229cc5a9a0cf05eb9c1f2b86b230fd8df3f78084"}, + {file = "s3transfer-0.6.2.tar.gz", hash = "sha256:cab66d3380cca3e70939ef2255d01cd8aece6a4907a9528740f668c4b0611861"}, ] [package.dependencies] @@ -175,6 +189,17 @@ files = [ {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] + [[package]] name = "urllib3" version = "1.26.16" @@ -194,4 +219,4 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "c8dcbdc818c56d1329f0689d39c87a561865bc26553e31050b079b45f81e3176" +content-hash = "94ac11370453c6009b8fd67ad18834668e6d8ae563542d02296bf0d7e583140a" From 25da5b3a91fab9ff490eb10339da7b2dbacc4c8c Mon Sep 17 00:00:00 2001 From: Daniel Bernstein Date: Wed, 6 Sep 2023 09:17:02 -0700 Subject: [PATCH 10/30] Initial pre-commit formatting of all files. 
--- .github/workflows/mypy.yml | 2 +- .github/workflows/test-build.yml | 3 +- .pre-commit-config.yaml | 71 +++ bin/palace-quicksight | 3 +- core/cli.py | 127 ++-- core/operation/baseoperation.py | 37 +- core/operation/export_analysis_operation.py | 79 ++- core/operation/import_from_json_operation.py | 59 +- .../publish_dashboard_from_template.py | 91 ++- core/util.py | 2 +- pyproject.toml | 1 - .../core/operation/analysis_test_responses.py | 582 ++++++++++++------ .../test_export_analysis_operation.py | 133 ++-- tests/core/test_cli.py | 9 +- 14 files changed, 806 insertions(+), 393 deletions(-) create mode 100644 .pre-commit-config.yaml diff --git a/.github/workflows/mypy.yml b/.github/workflows/mypy.yml index 054014a..3d7489a 100644 --- a/.github/workflows/mypy.yml +++ b/.github/workflows/mypy.yml @@ -26,7 +26,7 @@ jobs: sudo apt-get install --yes libxmlsec1-dev libxml2-dev - name: Install Python Packages 📦 - run: poetry install + run: poetry install - name: Run MyPy 🪄 run: poetry run mypy diff --git a/.github/workflows/test-build.yml b/.github/workflows/test-build.yml index a16a761..fe8005e 100644 --- a/.github/workflows/test-build.yml +++ b/.github/workflows/test-build.yml @@ -45,7 +45,7 @@ jobs: - name: Install Tox run: | - poetry install + poetry install env: POETRY_VIRTUALENVS_CREATE: false @@ -53,4 +53,3 @@ jobs: run: tox env: MODULE: ${{ matrix.module }} - diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000..89e9708 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,71 @@ +# See https://pre-commit.com for more information +# See https://pre-commit.com/hooks.html for more hooks +repos: + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.4.0 + hooks: + - id: trailing-whitespace + - id: end-of-file-fixer + - id: check-yaml + - id: check-json + - id: check-ast + - id: check-toml + - id: check-shebang-scripts-are-executable + - id: check-executables-have-shebangs + - id: check-merge-conflict + - id: check-added-large-files + - id: mixed-line-ending + + - repo: https://github.com/asottile/pyupgrade + rev: v3.3.2 + hooks: + - id: pyupgrade + args: + - --py38-plus + - --keep-runtime-typing + + - repo: https://github.com/myint/autoflake + rev: v2.1.1 + hooks: + - id: autoflake + args: + - --in-place + - --remove-all-unused-imports + - --ignore-init-module-imports + + - repo: https://github.com/psf/black + rev: 22.10.0 + hooks: + - id: black + name: Run black + + - repo: https://github.com/PyCQA/isort + rev: 5.12.0 + hooks: + - id: isort + name: Run isort + + - repo: https://github.com/sirosen/check-jsonschema + rev: 0.22.0 + hooks: + - id: check-github-workflows + - id: check-github-actions + + - repo: https://github.com/pappasam/toml-sort + rev: v0.23.0 + hooks: + - id: toml-sort + args: [] + files: pyproject.toml + + - repo: https://github.com/jackdewinter/pymarkdown + rev: v0.9.9 + hooks: + - id: pymarkdown + args: + - --config + - .pymarkdown.config.json + - scan + +# Exclude test files, since they may be intentionally malformed +exclude: ^tests/(core)/files/ diff --git a/bin/palace-quicksight b/bin/palace-quicksight index 188d18a..d2262f9 100755 --- a/bin/palace-quicksight +++ b/bin/palace-quicksight @@ -4,8 +4,9 @@ import os import sys from core.cli import cli + bin_dir = os.path.split(__file__)[0] package_dir = os.path.join(bin_dir, "..") sys.path.append(os.path.abspath(package_dir)) -cli() \ No newline at end of file +cli() diff --git a/core/cli.py b/core/cli.py index 885dbd1..f578935 100644 --- a/core/cli.py +++ 
b/core/cli.py @@ -1,18 +1,20 @@ +import logging + import boto3 import click -import logging - from core.operation.export_analysis_operation import ExportAnalysisOperation from core.operation.import_from_json_operation import ImportFromJsonOperation -from core.operation.publish_dashboard_from_template import PublishDashboardFromTemplateOperation +from core.operation.publish_dashboard_from_template import \ + PublishDashboardFromTemplateOperation log = logging.getLogger("core.cli") -def create_quicksight_client(aws_profile:str): +def create_quicksight_client(aws_profile: str): boto3.setup_default_session(profile_name=aws_profile) - return boto3.client('quicksight') + return boto3.client("quicksight") + @click.group() def cli(): @@ -20,12 +22,19 @@ def cli(): @click.command() -@click.option('--aws-profile', required=True, help='The AWS account profile') -@click.option('--aws-account-id', required=True, help='The ID of the AWS account') -@click.option('--analysis-id', required=True, help='The ID of the Analysis to be exported') -@click.option('--output-dir', required=True, - help='The path to the output directory to which resources will be exported') -def export_analysis(aws_profile: str, aws_account_id: str, analysis_id: str, output_dir: str): +@click.option("--aws-profile", required=True, help="The AWS account profile") +@click.option("--aws-account-id", required=True, help="The ID of the AWS account") +@click.option( + "--analysis-id", required=True, help="The ID of the Analysis to be exported" +) +@click.option( + "--output-dir", + required=True, + help="The path to the output directory to which resources will be exported", +) +def export_analysis( + aws_profile: str, aws_account_id: str, analysis_id: str, output_dir: str +): """ Exports a template and dependent data sets based on the specified analysis to JSON files. """ @@ -34,25 +43,46 @@ def export_analysis(aws_profile: str, aws_account_id: str, analysis_id: str, out click.echo(f"analysis_id= {analysis_id}") click.echo(f"aws_account_id= {aws_account_id}") click.echo(f"output_dir= {output_dir}") - ExportAnalysisOperation(qs_client=create_quicksight_client(aws_profile=aws_profile), aws_account_id=aws_account_id, analysis_id=analysis_id, - output_dir=output_dir).execute() + ExportAnalysisOperation( + qs_client=create_quicksight_client(aws_profile=aws_profile), + aws_account_id=aws_account_id, + analysis_id=analysis_id, + output_dir=output_dir, + ).execute() cli.add_command(export_analysis) @click.command -@click.option('--aws-profile', required=True, help='The AWS account profile') -@click.option('--aws-account-id', required=True, help='The ID of the AWS account') -@click.option('--template-name', required=True, help='The name of the template to be restored') -@click.option('--data-source-arn', required=True, - help='The ARN of the data source you want to associate with the data sets') -@click.option('--target-namespace', required=True, - help='The namespace you wish to target (e.g. 
tpp-prod, tpp-dev, tpp-staging).') -@click.option('--input-dir', required=True, - help='The path to the input directory from which resources will be imported') -def import_template(aws_profile: str, aws_account_id: str, template_name: str, data_source_arn: str, - target_namespace: str, input_dir: str): +@click.option("--aws-profile", required=True, help="The AWS account profile") +@click.option("--aws-account-id", required=True, help="The ID of the AWS account") +@click.option( + "--template-name", required=True, help="The name of the template to be restored" +) +@click.option( + "--data-source-arn", + required=True, + help="The ARN of the data source you want to associate with the data sets", +) +@click.option( + "--target-namespace", + required=True, + help="The namespace you wish to target (e.g. tpp-prod, tpp-dev, tpp-staging).", +) +@click.option( + "--input-dir", + required=True, + help="The path to the input directory from which resources will be imported", +) +def import_template( + aws_profile: str, + aws_account_id: str, + template_name: str, + data_source_arn: str, + target_namespace: str, + input_dir: str, +): """ Import template and datasource files from json """ @@ -64,23 +94,38 @@ def import_template(aws_profile: str, aws_account_id: str, template_name: str, d click.echo(f"data_source_arn = {data_source_arn}") click.echo(f"input_dir= {input_dir}") - ImportFromJsonOperation(qs_client=create_quicksight_client(aws_profile), aws_account_id=aws_account_id, template_name=template_name, - target_namespace=target_namespace, data_source_arn=data_source_arn, - input_dir=input_dir).execute() + ImportFromJsonOperation( + qs_client=create_quicksight_client(aws_profile), + aws_account_id=aws_account_id, + template_name=template_name, + target_namespace=target_namespace, + data_source_arn=data_source_arn, + input_dir=input_dir, + ).execute() cli.add_command(import_template) @click.command -@click.option('--aws-profile', required=True, help='The AWS account profile') -@click.option('--aws-account-id', required=True, help='The ID of the AWS account') -@click.option('--template-id', required=True, help='The ID of the template to be restored') -@click.option('--target-namespace', required=True, - help='The namespace you wish to target (e.g. tpp-prod, tpp-dev, tpp-staging).') -@click.option('--group-name', required=True, help='Name of the Quicksight User Group') -def publish_dashboard(aws_profile: str, aws_account_id: str, template_id: str, target_namespace: str, - group_name: str): +@click.option("--aws-profile", required=True, help="The AWS account profile") +@click.option("--aws-account-id", required=True, help="The ID of the AWS account") +@click.option( + "--template-id", required=True, help="The ID of the template to be restored" +) +@click.option( + "--target-namespace", + required=True, + help="The namespace you wish to target (e.g. 
tpp-prod, tpp-dev, tpp-staging).", +) +@click.option("--group-name", required=True, help="Name of the Quicksight User Group") +def publish_dashboard( + aws_profile: str, + aws_account_id: str, + template_id: str, + target_namespace: str, + group_name: str, +): """ Create/Update a dashboard from a template """ @@ -90,11 +135,13 @@ def publish_dashboard(aws_profile: str, aws_account_id: str, template_id: str, t click.echo(f"aws_account_id = {aws_account_id}") click.echo(f"template_id = {template_id}") click.echo(f"group_name = {group_name}") - PublishDashboardFromTemplateOperation(qs_client=create_quicksight_client(aws_profile), - aws_account_id=aws_account_id, - template_id=template_id, - target_namespace=target_namespace, - group_name=group_name).execute() + PublishDashboardFromTemplateOperation( + qs_client=create_quicksight_client(aws_profile), + aws_account_id=aws_account_id, + template_id=template_id, + target_namespace=target_namespace, + group_name=group_name, + ).execute() cli.add_command(publish_dashboard) diff --git a/core/operation/baseoperation.py b/core/operation/baseoperation.py index e1c628a..02272c4 100644 --- a/core/operation/baseoperation.py +++ b/core/operation/baseoperation.py @@ -1,11 +1,8 @@ import json +import logging import os from abc import abstractmethod - -import boto3 -import logging - ASSET_DIR = "assets" TEMPLATE_DIR = os.path.join(ASSET_DIR, "templates") DATA_SET_DIR = os.path.join(ASSET_DIR, "data-sets") @@ -36,30 +33,40 @@ def _create_or_update_template(self, template_data: dict) -> [str, str]: response = self._qs_client.create_template(**template_data) except self._qs_client.exceptions.ResourceExistsException as e: response = self._qs_client.update_template(**template_data) - httpStatus = response['ResponseMetadata']['HTTPStatusCode'] + httpStatus = response["ResponseMetadata"]["HTTPStatusCode"] if httpStatus != 202: - self._log.error(f"Unexpected response from create_template request: {httpStatus} ") + self._log.error( + f"Unexpected response from create_template request: {httpStatus} " + ) raise Exception( - f"Unexpected response from trying to create/update template : {json.dumps(response, indent=4)} ") + f"Unexpected response from trying to create/update template : {json.dumps(response, indent=4)} " + ) else: return response["Arn"], response["VersionArn"], response["TemplateId"] - def _create_or_update_template_from_template_definition(self, template_definition: dict): + def _create_or_update_template_from_template_definition( + self, template_definition: dict + ): template_definition["AwsAccountId"] = self._aws_account_id return self._create_or_update_template(template_data=template_definition) - def _resolve_data_set_id_from_placeholder(self, namespace:str, placeholder:str) -> str: + def _resolve_data_set_id_from_placeholder( + self, namespace: str, placeholder: str + ) -> str: return namespace + "-" + placeholder def _get_template_definition(self, template_id): - return self._qs_client.describe_template_definition(AwsAccountId=self._aws_account_id, TemplateId=template_id, - AliasName="$LATEST") + return self._qs_client.describe_template_definition( + AwsAccountId=self._aws_account_id, + TemplateId=template_id, + AliasName="$LATEST", + ) def _describe_data_set(self, data_set_id): - response = self._qs_client.describe_data_set(AwsAccountId=self._aws_account_id, DataSetId=data_set_id) - return response['DataSet'] + response = self._qs_client.describe_data_set( + AwsAccountId=self._aws_account_id, DataSetId=data_set_id + ) + return response["DataSet"] 
def _resolve_path(self, *paths): return os.path.join(*paths) - - diff --git a/core/operation/export_analysis_operation.py b/core/operation/export_analysis_operation.py index 13e342f..824ddc2 100644 --- a/core/operation/export_analysis_operation.py +++ b/core/operation/export_analysis_operation.py @@ -2,8 +2,9 @@ import os from typing import List -from core.operation.baseoperation import BaseOperation, TEMPLATE_DIR, DATA_SET_DIR -from core.util import retry, recursively_replace_value +from core.operation.baseoperation import (DATA_SET_DIR, TEMPLATE_DIR, + BaseOperation) +from core.util import recursively_replace_value, retry class ExportAnalysisOperation(BaseOperation): @@ -14,7 +15,7 @@ class ExportAnalysisOperation(BaseOperation): def __init__(self, analysis_id: str, output_dir: str, *args, **kwargs): self._analysis_id = analysis_id self._output_dir = output_dir - super(ExportAnalysisOperation, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) def execute(self): @@ -22,37 +23,47 @@ def execute(self): os.makedirs(self._resolve_path(self._output_dir, DATA_SET_DIR), exist_ok=True) # retrieve description - analysis_description = self._qs_client.describe_analysis(AwsAccountId=self._aws_account_id, - AnalysisId=self._analysis_id) + analysis_description = self._qs_client.describe_analysis( + AwsAccountId=self._aws_account_id, AnalysisId=self._analysis_id + ) # check that analysis exists - https_status = analysis_description['ResponseMetadata']['HTTPStatusCode'] + https_status = analysis_description["ResponseMetadata"]["HTTPStatusCode"] if https_status != 200: - self._log.error(f"Unexpected response from describe_analysis request: {https_status} ") + self._log.error( + f"Unexpected response from describe_analysis request: {https_status} " + ) return # retrieve definition - analysis_definition = self._qs_client.describe_analysis_definition(AwsAccountId=self._aws_account_id, - AnalysisId=self._analysis_id) + analysis_definition = self._qs_client.describe_analysis_definition( + AwsAccountId=self._aws_account_id, AnalysisId=self._analysis_id + ) # extract DataSet references analysis = analysis_description["Analysis"] - data_set_identifier_declarations = analysis_definition["Definition"]["DataSetIdentifierDeclarations"] + data_set_identifier_declarations = analysis_definition["Definition"][ + "DataSetIdentifierDeclarations" + ] data_set_references = [] for did in data_set_identifier_declarations: - data_set_references.append({ - 'DataSetPlaceholder': did["Identifier"], - 'DataSetArn': did["DataSetArn"], - }) + data_set_references.append( + { + "DataSetPlaceholder": did["Identifier"], + "DataSetArn": did["DataSetArn"], + } + ) # create a template from the analysis - arn, version_arn, template_id = \ - self._create_or_update_template_from_analysis(analysis=analysis, - data_set_references=data_set_references) + arn, version_arn, template_id = self._create_or_update_template_from_analysis( + analysis=analysis, data_set_references=data_set_references + ) def verify_success() -> bool: - self._template_definition = self._get_template_definition(template_id=template_id) + self._template_definition = self._get_template_definition( + template_id=template_id + ) return "SUCCESSFUL" in self._template_definition["ResourceStatus"] @@ -68,7 +79,9 @@ def verify_success() -> bool: # save the template as json file definition_json_str = json.dumps(map_to_save, indent=4) - template_file = self._resolve_path(self._output_dir, TEMPLATE_DIR, self._template_definition["Name"] + ".json") + template_file = 
self._resolve_path(
+            self._output_dir, TEMPLATE_DIR, self._template_definition["Name"] + ".json"
+        )
         with open(template_file, "w") as template_file:
             template_file.write(definition_json_str)
 
@@ -80,18 +93,20 @@ def verify_success() -> bool:
             ds_file = self._save_dataset_to_file(di=di)
             files_to_update.append(ds_file)
 
-    def _create_or_update_template_from_analysis(self, analysis, data_set_references: List):
+    def _create_or_update_template_from_analysis(
+        self, analysis, data_set_references: List
+    ):
         template_name = analysis["Name"]
         params = {
-            'AwsAccountId': self._aws_account_id,
-            'TemplateId': template_name + "-template",
-            'Name': analysis["Name"],
-            'SourceEntity': {
-                'SourceAnalysis': {
-                    'Arn': analysis['Arn'],
-                    'DataSetReferences': data_set_references,
+            "AwsAccountId": self._aws_account_id,
+            "TemplateId": template_name + "-template",
+            "Name": analysis["Name"],
+            "SourceEntity": {
+                "SourceAnalysis": {
+                    "Arn": analysis["Arn"],
+                    "DataSetReferences": data_set_references,
                 },
-            }
+            },
         }
         return self._create_or_update_template(template_data=params)
 
@@ -101,8 +116,8 @@ def _save_dataset_to_file(self, di) -> str:
         :param di: dataset map
         :return: The path of the dataset file
         """
-        identifier = di['Identifier']
-        arn = di['DataSetArn']
+        identifier = di["Identifier"]
+        arn = di["DataSetArn"]
         dataset_id = arn.split("dataset/", 1)[1]
         ds_def_elements_to_save = self._describe_data_set(dataset_id)
         # remove the following fields from the response before saving it.
@@ -115,7 +130,9 @@
         recursively_replace_value(ds_def_elements_to_save, "DataSourceArn", "")
         # save what is left to disk
         ds_def_str = json.dumps(ds_def_elements_to_save, indent=4)
-        dataset_file = self._resolve_path(self._output_dir, DATA_SET_DIR, identifier + ".json")
+        dataset_file = self._resolve_path(
+            self._output_dir, DATA_SET_DIR, identifier + ".json"
+        )
         with open(dataset_file, "w") as dataset_file:
             dataset_file.write(ds_def_str)
diff --git a/core/operation/import_from_json_operation.py b/core/operation/import_from_json_operation.py
index 3927d1a..3548a19 100644
--- a/core/operation/import_from_json_operation.py
+++ b/core/operation/import_from_json_operation.py
@@ -1,27 +1,37 @@
 import json
-from core.operation.baseoperation import BaseOperation, TEMPLATE_DIR, DATA_SET_DIR
+from core.operation.baseoperation import (DATA_SET_DIR, TEMPLATE_DIR,
+                                          BaseOperation)
 from core.util import recursively_replace_value
 
 
 class ImportFromJsonOperation(BaseOperation):
     """
-    Imports a Quicksight template and all it's dependencies into Quicksight.
+    Imports a Quicksight template and all its dependencies into Quicksight.
""" - def __init__(self, template_name: str, target_namespace: str, data_source_arn: str, input_dir: str, *args, - **kwargs): + def __init__( + self, + template_name: str, + target_namespace: str, + data_source_arn: str, + input_dir: str, + *args, + **kwargs, + ): self._template_name = template_name self._target_namespace = target_namespace self._data_source_arn = data_source_arn self._intput_dir = input_dir - super(ImportFromJsonOperation, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) def execute(self): # Read template file into dictionary template_data = None - template_file = self._resolve_path(self._intput_dir, TEMPLATE_DIR, self._template_name + ".json") - with open(template_file, "r") as template_file: + template_file = self._resolve_path( + self._intput_dir, TEMPLATE_DIR, self._template_name + ".json" + ) + with open(template_file) as template_file: template_data = json.loads(template_file.read()) # create namespace if not exists @@ -37,8 +47,13 @@ def execute(self): # create name template in namespace template_data["Name"] = self._target_namespace + "-" + self._template_name template_data["TemplateId"] = template_data["Name"] - arn, version_arn, template_id = self._create_or_update_template_from_template_definition( - template_definition=template_data) + ( + arn, + version_arn, + template_id, + ) = self._create_or_update_template_from_template_definition( + template_definition=template_data + ) # for each data set id associated with the template dataset_configurations = template_data["Definition"]["DataSetConfigurations"] @@ -46,22 +61,27 @@ def execute(self): # Read data set into dictionary dataset = None placeholder = di["Placeholder"] - dataset_filename = self._resolve_path(self._intput_dir, DATA_SET_DIR, placeholder + ".json") - with open(dataset_filename, "r") as dataset_file: + dataset_filename = self._resolve_path( + self._intput_dir, DATA_SET_DIR, placeholder + ".json" + ) + with open(dataset_filename) as dataset_file: dataset = json.loads(dataset_file.read()) # replace the blank datasource arn value in the data set dictionaries recursively_replace_value(dataset, "DataSourceArn", self._data_source_arn) # Remove fields that are not allowed - for i in ['OutputColumns', 'ConsumedSpiceCapacityInBytes']: + for i in ["OutputColumns", "ConsumedSpiceCapacityInBytes"]: dataset.pop(i) # Add required fields dataset["AwsAccountId"] = self._aws_account_id - dataset["DataSetId"] = self._resolve_data_set_id_from_placeholder(placeholder=placeholder, - namespace=self._target_namespace) + dataset["DataSetId"] = self._resolve_data_set_id_from_placeholder( + placeholder=placeholder, namespace=self._target_namespace + ) dataset["Name"] = dataset["Name"] - arn, data_set_id = self._create_or_update_data_set(dataset_definition=dataset) + arn, data_set_id = self._create_or_update_data_set( + dataset_definition=dataset + ) def _create_or_update_data_set(self, dataset_definition: dict): """ @@ -73,10 +93,13 @@ def _create_or_update_data_set(self, dataset_definition: dict): response = self._qs_client.create_data_set(**dataset_definition) except self._qs_client.exceptions.ResourceExistsException as e: response = self._qs_client.update_data_set(**dataset_definition) - httpStatus = response['ResponseMetadata']['HTTPStatusCode'] + httpStatus = response["ResponseMetadata"]["HTTPStatusCode"] if httpStatus != 201 and httpStatus != 200: - self._log.error(f"Unexpected response from create_dataset request: {httpStatus} ") + self._log.error( + f"Unexpected response from create_dataset 
request: {httpStatus} " + ) raise Exception( - f"Unexpected response from trying to create/update dataset : {json.dumps(response, indent=4)} ") + f"Unexpected response from trying to create/update dataset : {json.dumps(response, indent=4)} " + ) else: return response["Arn"], response["DataSetId"] diff --git a/core/operation/publish_dashboard_from_template.py b/core/operation/publish_dashboard_from_template.py index 33a2e71..9ea71c7 100644 --- a/core/operation/publish_dashboard_from_template.py +++ b/core/operation/publish_dashboard_from_template.py @@ -2,7 +2,6 @@ import time from core.operation.baseoperation import BaseOperation -from core.util import recursively_replace_value class PublishDashboardFromTemplateOperation(BaseOperation): @@ -10,21 +9,33 @@ class PublishDashboardFromTemplateOperation(BaseOperation): Publishes Dashboard based on template """ - def __init__(self, template_id: str, target_namespace: str, group_name, *args, **kwargs): + def __init__( + self, template_id: str, target_namespace: str, group_name, *args, **kwargs + ): self._template_id = template_id self._target_namespace = target_namespace self._group_name = group_name - super(PublishDashboardFromTemplateOperation, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) def execute(self): # get the template definition - template_def = self._get_template_definition(template_id=self._template_id)["Definition"] + template_def = self._get_template_definition(template_id=self._template_id)[ + "Definition" + ] - desc_template_params = {"AwsAccountId": self._aws_account_id, "TemplateId": self._template_id} + desc_template_params = { + "AwsAccountId": self._aws_account_id, + "TemplateId": self._template_id, + } template = self._qs_client.describe_template(**desc_template_params)["Template"] - namespace_params = {"AwsAccountId": self._aws_account_id, "Namespace": "default"} - namespace_arn = self._qs_client.describe_namespace(**namespace_params)["Namespace"]["Arn"] + namespace_params = { + "AwsAccountId": self._aws_account_id, + "Namespace": "default", + } + namespace_arn = self._qs_client.describe_namespace(**namespace_params)[ + "Namespace" + ]["Arn"] # extract the data source placeholders dashboard_id = self._template_id @@ -33,55 +44,66 @@ def execute(self): "Name": dashboard_id, "DashboardId": dashboard_id, "SourceEntity": { - "SourceTemplate": { - "DataSetReferences": [], - "Arn": template["Arn"] - } + "SourceTemplate": {"DataSetReferences": [], "Arn": template["Arn"]} }, } - ds_references = parameters["SourceEntity"]["SourceTemplate"]["DataSetReferences"] + ds_references = parameters["SourceEntity"]["SourceTemplate"][ + "DataSetReferences" + ] # for each data set config for dsr in template_def["DataSetConfigurations"]: # resolve the dataset arn placeholder = dsr["Placeholder"] - data_set_id = self._resolve_data_set_id_from_placeholder(placeholder=placeholder, - namespace=self._target_namespace) + data_set_id = self._resolve_data_set_id_from_placeholder( + placeholder=placeholder, namespace=self._target_namespace + ) data_set = self._describe_data_set(data_set_id=data_set_id) arn = data_set["Arn"] # associate arn with placeholder key and add to references array - ds_references.append({ - "DataSetPlaceholder": placeholder, - "DataSetArn": arn, - }) + ds_references.append( + { + "DataSetPlaceholder": placeholder, + "DataSetArn": arn, + } + ) # publish dashboard - dashboard_arn, dashboard_id = self._create_or_update_dashboard(dashboard_params=parameters) + dashboard_arn, dashboard_id = 
self._create_or_update_dashboard( + dashboard_params=parameters + ) # pause for a moment to allow the updates to be processed. time.sleep(3) # Grant permissions # resolve readers group - readers_group_arn = self._qs_client.describe_group(AwsAccountId=self._aws_account_id, Namespace="default", GroupName=self._group_name)["Group"]["Arn"] - - - qs_actions = ["quicksight:DescribeDashboard", - "quicksight:ListDashboardVersions", - "quicksight:QueryDashboard"] + readers_group_arn = self._qs_client.describe_group( + AwsAccountId=self._aws_account_id, + Namespace="default", + GroupName=self._group_name, + )["Group"]["Arn"] + + qs_actions = [ + "quicksight:DescribeDashboard", + "quicksight:ListDashboardVersions", + "quicksight:QueryDashboard", + ] permissions_params = { "AwsAccountId": self._aws_account_id, "DashboardId": self._template_id, - "GrantPermissions": [{ - "Actions": qs_actions, - "Principal": namespace_arn, + "GrantPermissions": [ + { + "Actions": qs_actions, + "Principal": namespace_arn, }, { "Actions": qs_actions, "Principal": readers_group_arn, - }], - } + }, + ], + } response = self._qs_client.update_dashboard_permissions(**permissions_params) @@ -95,10 +117,13 @@ def _create_or_update_dashboard(self, dashboard_params: dict) -> [str, str]: response = self._qs_client.create_dashboard(**dashboard_params) except self._qs_client.exceptions.ResourceExistsException as e: response = self._qs_client.update_dashboard(**dashboard_params) - httpStatus = response['ResponseMetadata']['HTTPStatusCode'] + httpStatus = response["ResponseMetadata"]["HTTPStatusCode"] if httpStatus != 202 and httpStatus != 200: - self._log.error(f"Unexpected response from create_template request: {httpStatus} ") + self._log.error( + f"Unexpected response from create_template request: {httpStatus} " + ) raise Exception( - f"Unexpected response from trying to create/update template : {json.dumps(response, indent=4)} ") + f"Unexpected response from trying to create/update template : {json.dumps(response, indent=4)} " + ) else: return response["Arn"], response["DashboardId"] diff --git a/core/util.py b/core/util.py index 0a51d68..122c2b5 100644 --- a/core/util.py +++ b/core/util.py @@ -29,4 +29,4 @@ def recursively_replace_value(mydict: dict, key: str, val: str): for key2 in mydict: value = mydict[key2] if isinstance(value, dict): - recursively_replace_value(value, key, val) \ No newline at end of file + recursively_replace_value(value, key, val) diff --git a/pyproject.toml b/pyproject.toml index fa2e666..0887c8c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -18,4 +18,3 @@ pytest= ">=7.2.0" [build-system] requires = ["poetry-core>=1.0.0"] build-backend = "poetry.core.masonry.api" - diff --git a/tests/core/operation/analysis_test_responses.py b/tests/core/operation/analysis_test_responses.py index bd64946..39f299f 100644 --- a/tests/core/operation/analysis_test_responses.py +++ b/tests/core/operation/analysis_test_responses.py @@ -5,226 +5,400 @@ def get_analysis_description_response(analysis_id): return { - 'ResponseMetadata': {'RequestId': 'aca2b75d-503b-48c4-b66c-273eafe97a33', - 'HTTPStatusCode': 200, - 'HTTPHeaders': {'date': 'Tue, 05 Sep 2023 19:29:21 GMT', - 'content-type': 'application/json', - 'content-length': '1495', - 'connection': 'keep-alive', - 'x-amzn-requestid': 'aca2b75d-503b-48c4-b66c-273eafe97a33'}, - 'RetryAttempts': 0}, 'Status': 200, - 'Analysis': {'AnalysisId': analysis_id, - 'Arn': 'arn', - 'Name': 'library', 'Status': 'CREATION_SUCCESSFUL', 'DataSetArns': [ - 
'arn:aws:quicksight:us-west-2:128682227026:dataset/e9e15c78-0193-4e4c-9a49-ed005569297d', - 'arn:aws:quicksight:us-west-2:128682227026:dataset/86eb4ca5-9552-4ba6-8b1b-7ef1b9b40f78', ], - 'ThemeArn': 'theme-arn', - 'CreatedTime': datetime(2023, 9, 1, 10, 6, 19, 376000, tzinfo=tzlocal()), - 'LastUpdatedTime': datetime(2023, 9, 1, 10, 6, 19, 376000, tzinfo=tzlocal()), - 'Sheets': [ - {'SheetId': '9f2df4a8-21e2-4aa4-adcd-3fb32e86c4ba', - 'Name': 'Circulation Summary'}, - {'SheetId': '341952d3-ece8-4a4b-924c-2d16c905e486', - 'Name': 'Circulation over Time'}, - {'SheetId': '8e9ca074-e043-4e02-be63-649e1ded32e6', - 'Name': 'Loaned Title Use'}]}, - 'RequestId': 'aca2b75d-503b-48c4-b66c-273eafe97a33'} + "ResponseMetadata": { + "RequestId": "aca2b75d-503b-48c4-b66c-273eafe97a33", + "HTTPStatusCode": 200, + "HTTPHeaders": { + "date": "Tue, 05 Sep 2023 19:29:21 GMT", + "content-type": "application/json", + "content-length": "1495", + "connection": "keep-alive", + "x-amzn-requestid": "aca2b75d-503b-48c4-b66c-273eafe97a33", + }, + "RetryAttempts": 0, + }, + "Status": 200, + "Analysis": { + "AnalysisId": analysis_id, + "Arn": "arn", + "Name": "library", + "Status": "CREATION_SUCCESSFUL", + "DataSetArns": [ + "arn:aws:quicksight:us-west-2:128682227026:dataset/e9e15c78-0193-4e4c-9a49-ed005569297d", + "arn:aws:quicksight:us-west-2:128682227026:dataset/86eb4ca5-9552-4ba6-8b1b-7ef1b9b40f78", + ], + "ThemeArn": "theme-arn", + "CreatedTime": datetime(2023, 9, 1, 10, 6, 19, 376000, tzinfo=tzlocal()), + "LastUpdatedTime": datetime( + 2023, 9, 1, 10, 6, 19, 376000, tzinfo=tzlocal() + ), + "Sheets": [ + { + "SheetId": "9f2df4a8-21e2-4aa4-adcd-3fb32e86c4ba", + "Name": "Circulation Summary", + }, + { + "SheetId": "341952d3-ece8-4a4b-924c-2d16c905e486", + "Name": "Circulation over Time", + }, + { + "SheetId": "8e9ca074-e043-4e02-be63-649e1ded32e6", + "Name": "Loaned Title Use", + }, + ], + }, + "RequestId": "aca2b75d-503b-48c4-b66c-273eafe97a33", + } def get_analysis_definition_response(): return { - 'ResponseMetadata': {'RequestId': 'dac531e6-fc4f-41a5-986e-4212c79e9c6c', 'HTTPStatusCode': 200, - 'HTTPHeaders': {'date': 'Tue, 05 Sep 2023 19:47:04 GMT', - 'content-type': 'application/json', 'content-length': '172259', - 'connection': 'keep-alive', - 'x-amzn-requestid': 'dac531e6-fc4f-41a5-986e-4212c79e9c6c'}, - 'RetryAttempts': 0}, 'Status': 200, - 'AnalysisId': 'd954330e-7b80-4a4a-ab64-47f53d8eea38', 'Name': 'library', - 'ResourceStatus': 'CREATION_SUCCESSFUL', - 'ThemeArn': 'theme-arn', - 'Definition': {'DataSetIdentifierDeclarations': [{'Identifier': 'circulation_view', - 'DataSetArn': 'arn:aws:quicksight:us-west-2:128682227026:dataset/e9e15c78-0193-4e4c-9a49-ed005569297d'}, - {'Identifier': 'patron_events', - 'DataSetArn': 'arn:aws:quicksight:us-west-2:128682227026:dataset/86eb4ca5-9552-4ba6-8b1b-7ef1b9b40f78'}], - 'Sheets': [], - 'ColumnConfigurations': [], - 'AnalysisDefaults': {'DefaultNewSheetConfiguration': { - 'InteractiveLayoutConfiguration': {'Grid': {'CanvasSizeOptions': { - 'ScreenCanvasSizeOptions': {'ResizeOption': 'FIXED', - 'OptimizedViewPortWidth': '1600px'}}}}, - 'SheetContentType': 'INTERACTIVE'}}}, 'RequestId': 'dac531e6-fc4f-41a5-986e-4212c79e9c6c'} + "ResponseMetadata": { + "RequestId": "dac531e6-fc4f-41a5-986e-4212c79e9c6c", + "HTTPStatusCode": 200, + "HTTPHeaders": { + "date": "Tue, 05 Sep 2023 19:47:04 GMT", + "content-type": "application/json", + "content-length": "172259", + "connection": "keep-alive", + "x-amzn-requestid": "dac531e6-fc4f-41a5-986e-4212c79e9c6c", + }, + "RetryAttempts": 0, 
+ }, + "Status": 200, + "AnalysisId": "d954330e-7b80-4a4a-ab64-47f53d8eea38", + "Name": "library", + "ResourceStatus": "CREATION_SUCCESSFUL", + "ThemeArn": "theme-arn", + "Definition": { + "DataSetIdentifierDeclarations": [ + { + "Identifier": "circulation_view", + "DataSetArn": "arn:aws:quicksight:us-west-2:128682227026:dataset/e9e15c78-0193-4e4c-9a49-ed005569297d", + }, + { + "Identifier": "patron_events", + "DataSetArn": "arn:aws:quicksight:us-west-2:128682227026:dataset/86eb4ca5-9552-4ba6-8b1b-7ef1b9b40f78", + }, + ], + "Sheets": [], + "ColumnConfigurations": [], + "AnalysisDefaults": { + "DefaultNewSheetConfiguration": { + "InteractiveLayoutConfiguration": { + "Grid": { + "CanvasSizeOptions": { + "ScreenCanvasSizeOptions": { + "ResizeOption": "FIXED", + "OptimizedViewPortWidth": "1600px", + } + } + } + }, + "SheetContentType": "INTERACTIVE", + } + }, + }, + "RequestId": "dac531e6-fc4f-41a5-986e-4212c79e9c6c", + } def create_template_response(): - return {'ResponseMetadata': {'RequestId': '55aba51c-0052-4311-8fb3-b9433f0041da', 'HTTPStatusCode': 202, - 'HTTPHeaders': {'date': 'Tue, 05 Sep 2023 21:52:26 GMT', - 'content-type': 'application/json', 'content-length': '293', - 'connection': 'keep-alive', - 'x-amzn-requestid': '55aba51c-0052-4311-8fb3-b9433f0041da'}, - 'RetryAttempts': 0}, 'Status': 202, 'TemplateId': 'library-template', - 'Arn': 'arn:aws:quicksight:us-west-2:128682227026:template/library-template', - 'VersionArn': 'arn:aws:quicksight:us-west-2:128682227026:template/library-template/version/9', - 'CreationStatus': 'CREATION_IN_PROGRESS', 'RequestId': '55aba51c-0052-4311-8fb3-b9433f0041da'} + return { + "ResponseMetadata": { + "RequestId": "55aba51c-0052-4311-8fb3-b9433f0041da", + "HTTPStatusCode": 202, + "HTTPHeaders": { + "date": "Tue, 05 Sep 2023 21:52:26 GMT", + "content-type": "application/json", + "content-length": "293", + "connection": "keep-alive", + "x-amzn-requestid": "55aba51c-0052-4311-8fb3-b9433f0041da", + }, + "RetryAttempts": 0, + }, + "Status": 202, + "TemplateId": "library-template", + "Arn": "arn:aws:quicksight:us-west-2:128682227026:template/library-template", + "VersionArn": "arn:aws:quicksight:us-west-2:128682227026:template/library-template/version/9", + "CreationStatus": "CREATION_IN_PROGRESS", + "RequestId": "55aba51c-0052-4311-8fb3-b9433f0041da", + } def create_template_parameters(aws_account_id: str): - {'AwsAccountId': aws_account_id, 'TemplateId': 'library-template', 'Name': 'library', 'SourceEntity': { - 'SourceAnalysis': { - 'Arn': 'arn:aws:quicksight:us-west-2:128682227026:analysis/d954330e-7b80-4a4a-ab64-47f53d8eea38', - 'DataSetReferences': [{'DataSetPlaceholder': 'circulation_view', - 'DataSetArn': 'arn:aws:quicksight:us-west-2:128682227026:dataset/e9e15c78-0193-4e4c-9a49-ed005569297d'}, - {'DataSetPlaceholder': 'patron_events', - 'DataSetArn': 'ds_2_arn'}]}}} + { + "AwsAccountId": aws_account_id, + "TemplateId": "library-template", + "Name": "library", + "SourceEntity": { + "SourceAnalysis": { + "Arn": "arn:aws:quicksight:us-west-2:128682227026:analysis/d954330e-7b80-4a4a-ab64-47f53d8eea38", + "DataSetReferences": [ + { + "DataSetPlaceholder": "circulation_view", + "DataSetArn": "arn:aws:quicksight:us-west-2:128682227026:dataset/e9e15c78-0193-4e4c-9a49-ed005569297d", + }, + {"DataSetPlaceholder": "patron_events", "DataSetArn": "ds_2_arn"}, + ], + } + }, + } def describe_template_definition_response(): - return {'ResponseMetadata': {'RequestId': 'c9ebc247-e7f7-4140-913c-2a1288ff43f6', 'HTTPStatusCode': 200, - 'HTTPHeaders': {'date': 'Tue, 05 
Sep 2023 21:59:51 GMT', - 'content-type': 'application/json', 'content-length': '173310', - 'connection': 'keep-alive', - 'x-amzn-requestid': 'c9ebc247-e7f7-4140-913c-2a1288ff43f6'}, - 'RetryAttempts': 0}, 'Status': 200, 'Name': 'library', - 'TemplateId': 'library-template', - 'ResourceStatus': 'CREATION_SUCCESSFUL', - 'ThemeArn': 'arn:aws:quicksight:us-west-2:128682227026:theme/5f5e7417-a800-4812-9e59-dc44e0580412', - 'Definition': {'DataSetConfigurations': [{'Placeholder': 'circulation_view', 'DataSetSchema': { - 'ColumnSchemaList': [{'Name': 'fiction', 'DataType': 'INTEGER'}, - {'Name': 'location', 'DataType': 'STRING'}, - {'Name': 'event_type', 'DataType': 'STRING'}, - {'Name': 'audience', 'DataType': 'STRING'}, - {'Name': 'medium', 'DataType': 'STRING'}, - {'Name': 'time_stamp', 'DataType': 'DATETIME'}, - {'Name': 'distributor', 'DataType': 'STRING'}, - {'Name': 'author', 'DataType': 'STRING'}, - {'Name': 'libary_short_name', 'DataType': 'STRING'}, - {'Name': 'title', 'DataType': 'STRING'}, - {'Name': 'open_access', 'DataType': 'INTEGER'}, - {'Name': 'collection_name', 'DataType': 'STRING'}, - {'Name': 'genre', 'DataType': 'STRING'}, - {'Name': 'library_name', 'DataType': 'STRING'}]}, 'ColumnGroupSchemaList': []}, - {'Placeholder': 'patron_events', 'DataSetSchema': { - 'ColumnSchemaList': [ - {'Name': 'location', 'DataType': 'STRING'}, - {'Name': 'event_type', 'DataType': 'STRING'}, - {'Name': 'time_stamp', - 'DataType': 'DATETIME'}, - {'Name': 'library_name', - 'DataType': 'STRING'}]}, - 'ColumnGroupSchemaList': []}], }, - } + return { + "ResponseMetadata": { + "RequestId": "c9ebc247-e7f7-4140-913c-2a1288ff43f6", + "HTTPStatusCode": 200, + "HTTPHeaders": { + "date": "Tue, 05 Sep 2023 21:59:51 GMT", + "content-type": "application/json", + "content-length": "173310", + "connection": "keep-alive", + "x-amzn-requestid": "c9ebc247-e7f7-4140-913c-2a1288ff43f6", + }, + "RetryAttempts": 0, + }, + "Status": 200, + "Name": "library", + "TemplateId": "library-template", + "ResourceStatus": "CREATION_SUCCESSFUL", + "ThemeArn": "arn:aws:quicksight:us-west-2:128682227026:theme/5f5e7417-a800-4812-9e59-dc44e0580412", + "Definition": { + "DataSetConfigurations": [ + { + "Placeholder": "circulation_view", + "DataSetSchema": { + "ColumnSchemaList": [ + {"Name": "fiction", "DataType": "INTEGER"}, + {"Name": "location", "DataType": "STRING"}, + {"Name": "event_type", "DataType": "STRING"}, + {"Name": "audience", "DataType": "STRING"}, + {"Name": "medium", "DataType": "STRING"}, + {"Name": "time_stamp", "DataType": "DATETIME"}, + {"Name": "distributor", "DataType": "STRING"}, + {"Name": "author", "DataType": "STRING"}, + {"Name": "libary_short_name", "DataType": "STRING"}, + {"Name": "title", "DataType": "STRING"}, + {"Name": "open_access", "DataType": "INTEGER"}, + {"Name": "collection_name", "DataType": "STRING"}, + {"Name": "genre", "DataType": "STRING"}, + {"Name": "library_name", "DataType": "STRING"}, + ] + }, + "ColumnGroupSchemaList": [], + }, + { + "Placeholder": "patron_events", + "DataSetSchema": { + "ColumnSchemaList": [ + {"Name": "location", "DataType": "STRING"}, + {"Name": "event_type", "DataType": "STRING"}, + {"Name": "time_stamp", "DataType": "DATETIME"}, + {"Name": "library_name", "DataType": "STRING"}, + ] + }, + "ColumnGroupSchemaList": [], + }, + ], + }, + } def describe_data_set_1_response(): - return {'ResponseMetadata': {'RequestId': '5c7285af-6ea4-4192-af69-8f8093785112', 'HTTPStatusCode': 200, - 'HTTPHeaders': {'date': 'Wed, 06 Sep 2023 00:02:50 GMT', - 'content-type': 
'application/json', 'content-length': '4642', - 'connection': 'keep-alive', - 'x-amzn-requestid': '5c7285af-6ea4-4192-af69-8f8093785112'}, - 'RetryAttempts': 0}, 'Status': 200, - 'DataSet': {'Arn': 'arn:aws:quicksight:us-west-2:128682227026:dataset/e9e15c78-0193-4e4c-9a49-ed005569297d', - 'DataSetId': 'e9e15c78-0193-4e4c-9a49-ed005569297d', 'Name': 'circulation_events_view', - 'CreatedTime': datetime(2023, 2, 28, 13, 39, 33, 923000, tzinfo=tzlocal()), - 'LastUpdatedTime': datetime(2023, 6, 7, 10, 5, 32, 640000, tzinfo=tzlocal()), - 'PhysicalTableMap': {'25046cd8-e08f-41e0-8af8-5259b64499fd': {'CustomSql': { - 'DataSourceArn': 'arn:aws:quicksight:us-west-2:128682227026:datasource/a4e44abb-c1fd-4b5a-be3f-daca72d50e0a', - 'Name': 'circulation_events_view', - 'SqlQuery': 'select \n ce.time_stamp, \n l.short_name as libary_short_name, \n l.name as library_name,\n l.location as location,\n et.name as event_type, \n i.identifier,\n it.name as identifier_type,\n c.name as collection_name, \n ce.title, \n ce.author,\n ce.audience,\n ce.publisher,\n ce.language,\n ce.genre,\n ce.open_access,\n ce.fiction,\n ce.distributor,\n ce.medium\nfrom \n circulation_events ce,\n libraries l,\n collections c,\n circulation_event_types et,\n identifiers i,\n identifier_types it\nwhere \n ce.library_id = l.id and\n ce.event_type_id = et.id and\n ce.collection_id = c.id and\n ce.identifier_id = i.id and \n i.identifier_type_id = it.id', - 'Columns': [{'Name': 'time_stamp', 'Type': 'DATETIME'}, - {'Name': 'libary_short_name', 'Type': 'STRING'}, - {'Name': 'library_name', 'Type': 'STRING'}, - {'Name': 'location', 'Type': 'STRING'}, - {'Name': 'event_type', 'Type': 'STRING'}, - {'Name': 'identifier', 'Type': 'STRING'}, - {'Name': 'identifier_type', 'Type': 'STRING'}, - {'Name': 'collection_name', 'Type': 'STRING'}, - {'Name': 'title', 'Type': 'STRING'}, {'Name': 'author', 'Type': 'STRING'}, - {'Name': 'audience', 'Type': 'STRING'}, {'Name': 'publisher', 'Type': 'STRING'}, - {'Name': 'language', 'Type': 'STRING'}, {'Name': 'genre', 'Type': 'STRING'}, - {'Name': 'open_access', 'Type': 'BIT'}, {'Name': 'fiction', 'Type': 'BIT'}, - {'Name': 'distributor', 'Type': 'STRING'}, - {'Name': 'medium', 'Type': 'STRING'}]}}}, 'LogicalTableMap': { - '6c80275e-d03d-417c-a8cd-57d93e58129b': {'Alias': 'circulation_events_view', 'DataTransforms': [{ - 'ProjectOperation': { - 'ProjectedColumns': [ - 'time_stamp', - 'libary_short_name', - 'library_name', - 'location', - 'event_type', - 'identifier', - 'identifier_type', - 'collection_name', - 'title', - 'author', - 'audience', - 'publisher', - 'language', - 'genre', - 'open_access', - 'fiction', - 'distributor', - 'medium']}}], - 'Source': { - 'PhysicalTableId': '25046cd8-e08f-41e0-8af8-5259b64499fd'}}}, - 'OutputColumns': [{'Name': 'time_stamp', 'Type': 'DATETIME'}, - {'Name': 'libary_short_name', 'Type': 'STRING'}, - {'Name': 'library_name', 'Type': 'STRING'}, - {'Name': 'location', 'Type': 'STRING'}, - {'Name': 'event_type', 'Type': 'STRING'}, - {'Name': 'identifier', 'Type': 'STRING'}, - {'Name': 'identifier_type', 'Type': 'STRING'}, - {'Name': 'collection_name', 'Type': 'STRING'}, - {'Name': 'title', 'Type': 'STRING'}, {'Name': 'author', 'Type': 'STRING'}, - {'Name': 'audience', 'Type': 'STRING'}, - {'Name': 'publisher', 'Type': 'STRING'}, - {'Name': 'language', 'Type': 'STRING'}, {'Name': 'genre', 'Type': 'STRING'}, - {'Name': 'open_access', 'Type': 'INTEGER'}, - {'Name': 'fiction', 'Type': 'INTEGER'}, - {'Name': 'distributor', 'Type': 'STRING'}, - {'Name': 'medium', 'Type': 
'STRING'}], 'ImportMode': 'DIRECT_QUERY', - 'ConsumedSpiceCapacityInBytes': 0, 'FieldFolders': {}, - 'DataSetUsageConfiguration': {'DisableUseAsDirectQuerySource': False, - 'DisableUseAsImportedSource': False}}, - 'RequestId': '5c7285af-6ea4-4192-af69-8f8093785112'} + return { + "ResponseMetadata": { + "RequestId": "5c7285af-6ea4-4192-af69-8f8093785112", + "HTTPStatusCode": 200, + "HTTPHeaders": { + "date": "Wed, 06 Sep 2023 00:02:50 GMT", + "content-type": "application/json", + "content-length": "4642", + "connection": "keep-alive", + "x-amzn-requestid": "5c7285af-6ea4-4192-af69-8f8093785112", + }, + "RetryAttempts": 0, + }, + "Status": 200, + "DataSet": { + "Arn": "arn:aws:quicksight:us-west-2:128682227026:dataset/e9e15c78-0193-4e4c-9a49-ed005569297d", + "DataSetId": "e9e15c78-0193-4e4c-9a49-ed005569297d", + "Name": "circulation_events_view", + "CreatedTime": datetime(2023, 2, 28, 13, 39, 33, 923000, tzinfo=tzlocal()), + "LastUpdatedTime": datetime( + 2023, 6, 7, 10, 5, 32, 640000, tzinfo=tzlocal() + ), + "PhysicalTableMap": { + "25046cd8-e08f-41e0-8af8-5259b64499fd": { + "CustomSql": { + "DataSourceArn": "arn:aws:quicksight:us-west-2:128682227026:datasource/a4e44abb-c1fd-4b5a-be3f-daca72d50e0a", + "Name": "circulation_events_view", + "SqlQuery": "select \n ce.time_stamp, \n l.short_name as libary_short_name, \n l.name as library_name,\n l.location as location,\n et.name as event_type, \n i.identifier,\n it.name as identifier_type,\n c.name as collection_name, \n ce.title, \n ce.author,\n ce.audience,\n ce.publisher,\n ce.language,\n ce.genre,\n ce.open_access,\n ce.fiction,\n ce.distributor,\n ce.medium\nfrom \n circulation_events ce,\n libraries l,\n collections c,\n circulation_event_types et,\n identifiers i,\n identifier_types it\nwhere \n ce.library_id = l.id and\n ce.event_type_id = et.id and\n ce.collection_id = c.id and\n ce.identifier_id = i.id and \n i.identifier_type_id = it.id", + "Columns": [ + {"Name": "time_stamp", "Type": "DATETIME"}, + {"Name": "libary_short_name", "Type": "STRING"}, + {"Name": "library_name", "Type": "STRING"}, + {"Name": "location", "Type": "STRING"}, + {"Name": "event_type", "Type": "STRING"}, + {"Name": "identifier", "Type": "STRING"}, + {"Name": "identifier_type", "Type": "STRING"}, + {"Name": "collection_name", "Type": "STRING"}, + {"Name": "title", "Type": "STRING"}, + {"Name": "author", "Type": "STRING"}, + {"Name": "audience", "Type": "STRING"}, + {"Name": "publisher", "Type": "STRING"}, + {"Name": "language", "Type": "STRING"}, + {"Name": "genre", "Type": "STRING"}, + {"Name": "open_access", "Type": "BIT"}, + {"Name": "fiction", "Type": "BIT"}, + {"Name": "distributor", "Type": "STRING"}, + {"Name": "medium", "Type": "STRING"}, + ], + } + } + }, + "LogicalTableMap": { + "6c80275e-d03d-417c-a8cd-57d93e58129b": { + "Alias": "circulation_events_view", + "DataTransforms": [ + { + "ProjectOperation": { + "ProjectedColumns": [ + "time_stamp", + "libary_short_name", + "library_name", + "location", + "event_type", + "identifier", + "identifier_type", + "collection_name", + "title", + "author", + "audience", + "publisher", + "language", + "genre", + "open_access", + "fiction", + "distributor", + "medium", + ] + } + } + ], + "Source": { + "PhysicalTableId": "25046cd8-e08f-41e0-8af8-5259b64499fd" + }, + } + }, + "OutputColumns": [ + {"Name": "time_stamp", "Type": "DATETIME"}, + {"Name": "libary_short_name", "Type": "STRING"}, + {"Name": "library_name", "Type": "STRING"}, + {"Name": "location", "Type": "STRING"}, + {"Name": "event_type", "Type": 
"STRING"}, + {"Name": "identifier", "Type": "STRING"}, + {"Name": "identifier_type", "Type": "STRING"}, + {"Name": "collection_name", "Type": "STRING"}, + {"Name": "title", "Type": "STRING"}, + {"Name": "author", "Type": "STRING"}, + {"Name": "audience", "Type": "STRING"}, + {"Name": "publisher", "Type": "STRING"}, + {"Name": "language", "Type": "STRING"}, + {"Name": "genre", "Type": "STRING"}, + {"Name": "open_access", "Type": "INTEGER"}, + {"Name": "fiction", "Type": "INTEGER"}, + {"Name": "distributor", "Type": "STRING"}, + {"Name": "medium", "Type": "STRING"}, + ], + "ImportMode": "DIRECT_QUERY", + "ConsumedSpiceCapacityInBytes": 0, + "FieldFolders": {}, + "DataSetUsageConfiguration": { + "DisableUseAsDirectQuerySource": False, + "DisableUseAsImportedSource": False, + }, + }, + "RequestId": "5c7285af-6ea4-4192-af69-8f8093785112", + } def describe_data_set_2_response(): - return {'ResponseMetadata': {'RequestId': '3e6ad967-c44d-4a86-8391-be51ebf978c5', 'HTTPStatusCode': 200, - 'HTTPHeaders': {'date': 'Wed, 06 Sep 2023 00:07:58 GMT', - 'content-type': 'application/json', 'content-length': '2564', - 'connection': 'keep-alive', - 'x-amzn-requestid': '3e6ad967-c44d-4a86-8391-be51ebf978c5'}, - 'RetryAttempts': 0}, 'Status': 200, - 'DataSet': {'Arn': 'arn:aws:quicksight:us-west-2:128682227026:dataset/86eb4ca5-9552-4ba6-8b1b-7ef1b9b40f78', - 'DataSetId': '86eb4ca5-9552-4ba6-8b1b-7ef1b9b40f78', 'Name': 'patron_events', - 'CreatedTime': datetime(2023, 2, 28, 16, 8, 15, 620000, tzinfo=tzlocal()), - 'LastUpdatedTime': datetime(2023, 3, 1, 8, 28, 1, 477000, tzinfo=tzlocal()), - 'PhysicalTableMap': {'50873ea6-0c3a-4989-97e1-eb740e8a3348': {'CustomSql': { - 'DataSourceArn': 'arn:aws:quicksight:us-west-2:128682227026:datasource/a4e44abb-c1fd-4b5a-be3f-daca72d50e0a', - 'Name': 'patron_events', - 'SqlQuery': 'select \n pe.time_stamp, \n l.short_name as library_short_name, \n l.name as library_name, \n l.location, \n l.state, \n ev.name as event_type \nfrom \n patron_events pe, \n libraries l, \n circulation_event_types ev \nwhere \n pe.library_id = l.id and \n pe.event_type_id = ev.id', - 'Columns': [{'Name': 'time_stamp', 'Type': 'DATETIME'}, - {'Name': 'library_short_name', 'Type': 'STRING'}, - {'Name': 'library_name', 'Type': 'STRING'}, - {'Name': 'location', 'Type': 'STRING'}, {'Name': 'state', 'Type': 'STRING'}, - {'Name': 'event_type', 'Type': 'STRING'}]}}}, 'LogicalTableMap': { - '4dc4e51c-76b2-4595-8b3b-1759f76a05c4': {'Alias': 'patron_events', 'DataTransforms': [{ - 'ProjectOperation': { - 'ProjectedColumns': [ - 'time_stamp', - 'library_short_name', - 'library_name', - 'location', - 'state', - 'event_type']}}], - 'Source': { - 'PhysicalTableId': '50873ea6-0c3a-4989-97e1-eb740e8a3348'}}}, - 'OutputColumns': [{'Name': 'time_stamp', 'Type': 'DATETIME'}, - {'Name': 'library_short_name', 'Type': 'STRING'}, - {'Name': 'library_name', 'Type': 'STRING'}, - {'Name': 'location', 'Type': 'STRING'}, {'Name': 'state', 'Type': 'STRING'}, - {'Name': 'event_type', 'Type': 'STRING'}], 'ImportMode': 'DIRECT_QUERY', - 'ConsumedSpiceCapacityInBytes': 0, 'FieldFolders': {}, - 'DataSetUsageConfiguration': {'DisableUseAsDirectQuerySource': False, - 'DisableUseAsImportedSource': False}}, - 'RequestId': '3e6ad967-c44d-4a86-8391-be51ebf978c5'} + return { + "ResponseMetadata": { + "RequestId": "3e6ad967-c44d-4a86-8391-be51ebf978c5", + "HTTPStatusCode": 200, + "HTTPHeaders": { + "date": "Wed, 06 Sep 2023 00:07:58 GMT", + "content-type": "application/json", + "content-length": "2564", + "connection": "keep-alive", + 
"x-amzn-requestid": "3e6ad967-c44d-4a86-8391-be51ebf978c5", + }, + "RetryAttempts": 0, + }, + "Status": 200, + "DataSet": { + "Arn": "arn:aws:quicksight:us-west-2:128682227026:dataset/86eb4ca5-9552-4ba6-8b1b-7ef1b9b40f78", + "DataSetId": "86eb4ca5-9552-4ba6-8b1b-7ef1b9b40f78", + "Name": "patron_events", + "CreatedTime": datetime(2023, 2, 28, 16, 8, 15, 620000, tzinfo=tzlocal()), + "LastUpdatedTime": datetime(2023, 3, 1, 8, 28, 1, 477000, tzinfo=tzlocal()), + "PhysicalTableMap": { + "50873ea6-0c3a-4989-97e1-eb740e8a3348": { + "CustomSql": { + "DataSourceArn": "arn:aws:quicksight:us-west-2:128682227026:datasource/a4e44abb-c1fd-4b5a-be3f-daca72d50e0a", + "Name": "patron_events", + "SqlQuery": "select \n pe.time_stamp, \n l.short_name as library_short_name, \n l.name as library_name, \n l.location, \n l.state, \n ev.name as event_type \nfrom \n patron_events pe, \n libraries l, \n circulation_event_types ev \nwhere \n pe.library_id = l.id and \n pe.event_type_id = ev.id", + "Columns": [ + {"Name": "time_stamp", "Type": "DATETIME"}, + {"Name": "library_short_name", "Type": "STRING"}, + {"Name": "library_name", "Type": "STRING"}, + {"Name": "location", "Type": "STRING"}, + {"Name": "state", "Type": "STRING"}, + {"Name": "event_type", "Type": "STRING"}, + ], + } + } + }, + "LogicalTableMap": { + "4dc4e51c-76b2-4595-8b3b-1759f76a05c4": { + "Alias": "patron_events", + "DataTransforms": [ + { + "ProjectOperation": { + "ProjectedColumns": [ + "time_stamp", + "library_short_name", + "library_name", + "location", + "state", + "event_type", + ] + } + } + ], + "Source": { + "PhysicalTableId": "50873ea6-0c3a-4989-97e1-eb740e8a3348" + }, + } + }, + "OutputColumns": [ + {"Name": "time_stamp", "Type": "DATETIME"}, + {"Name": "library_short_name", "Type": "STRING"}, + {"Name": "library_name", "Type": "STRING"}, + {"Name": "location", "Type": "STRING"}, + {"Name": "state", "Type": "STRING"}, + {"Name": "event_type", "Type": "STRING"}, + ], + "ImportMode": "DIRECT_QUERY", + "ConsumedSpiceCapacityInBytes": 0, + "FieldFolders": {}, + "DataSetUsageConfiguration": { + "DisableUseAsDirectQuerySource": False, + "DisableUseAsImportedSource": False, + }, + }, + "RequestId": "3e6ad967-c44d-4a86-8391-be51ebf978c5", + } diff --git a/tests/core/operation/test_export_analysis_operation.py b/tests/core/operation/test_export_analysis_operation.py index 80c142d..48cc11b 100644 --- a/tests/core/operation/test_export_analysis_operation.py +++ b/tests/core/operation/test_export_analysis_operation.py @@ -1,59 +1,101 @@ import os -from datetime import datetime import botocore from botocore.stub import Stubber -from dateutil.tz import tzlocal from core.operation.export_analysis_operation import ExportAnalysisOperation -from tests.core.operation.analysis_test_responses import get_analysis_description_response, \ - get_analysis_definition_response, create_template_response, describe_template_definition_response, \ - describe_data_set_2_response, describe_data_set_1_response +from tests.core.operation.analysis_test_responses import ( + create_template_response, describe_data_set_1_response, + describe_data_set_2_response, describe_template_definition_response, + get_analysis_definition_response, get_analysis_description_response) class TestExportAnalysisOperation: - def test(self): analysis_id = "my-quicksight-analysis-id" output_dir = "/tmp/test-output" account = "012345678910" - qs_client = botocore.session.get_session().create_client('quicksight') + qs_client = botocore.session.get_session().create_client("quicksight") with 
Stubber(qs_client) as stub: - analysis_description_params = {'AwsAccountId': account, 'AnalysisId': analysis_id} - - stub.add_response('describe_analysis', service_response=get_analysis_description_response(analysis_id), - expected_params=analysis_description_params) - - stub.add_response('describe_analysis_definition', service_response=get_analysis_definition_response(), - expected_params=analysis_description_params) - - create_template_params = {'AwsAccountId': account, 'TemplateId': 'library-template', 'Name': 'library', - 'SourceEntity': { - 'SourceAnalysis': { - 'Arn': 'arn', - 'DataSetReferences': [{'DataSetPlaceholder': 'circulation_view', - 'DataSetArn': 'arn:aws:quicksight:us-west-2:128682227026:dataset/e9e15c78-0193-4e4c-9a49-ed005569297d'}, - {'DataSetPlaceholder': 'patron_events', - 'DataSetArn': 'arn:aws:quicksight:us-west-2:128682227026:dataset/86eb4ca5-9552-4ba6-8b1b-7ef1b9b40f78'}]}}} - - stub.add_response('create_template', service_response=create_template_response(), - expected_params=create_template_params) - - stub.add_response('describe_template_definition', service_response=describe_template_definition_response(), - expected_params={ - 'AwsAccountId': account, 'TemplateId': 'library-template', 'AliasName': "$LATEST", }) - - stub.add_response('describe_data_set', service_response=describe_data_set_1_response(), expected_params={ - 'AwsAccountId': account, 'DataSetId': 'e9e15c78-0193-4e4c-9a49-ed005569297d', - }) - - stub.add_response('describe_data_set', service_response=describe_data_set_2_response(), expected_params={ - 'AwsAccountId': account, 'DataSetId': '86eb4ca5-9552-4ba6-8b1b-7ef1b9b40f78', - }) - - op = ExportAnalysisOperation(qs_client=qs_client, analysis_id=analysis_id, output_dir=output_dir, - aws_account_id=account) + analysis_description_params = { + "AwsAccountId": account, + "AnalysisId": analysis_id, + } + + stub.add_response( + "describe_analysis", + service_response=get_analysis_description_response(analysis_id), + expected_params=analysis_description_params, + ) + + stub.add_response( + "describe_analysis_definition", + service_response=get_analysis_definition_response(), + expected_params=analysis_description_params, + ) + + create_template_params = { + "AwsAccountId": account, + "TemplateId": "library-template", + "Name": "library", + "SourceEntity": { + "SourceAnalysis": { + "Arn": "arn", + "DataSetReferences": [ + { + "DataSetPlaceholder": "circulation_view", + "DataSetArn": "arn:aws:quicksight:us-west-2:128682227026:dataset/e9e15c78-0193-4e4c-9a49-ed005569297d", + }, + { + "DataSetPlaceholder": "patron_events", + "DataSetArn": "arn:aws:quicksight:us-west-2:128682227026:dataset/86eb4ca5-9552-4ba6-8b1b-7ef1b9b40f78", + }, + ], + } + }, + } + + stub.add_response( + "create_template", + service_response=create_template_response(), + expected_params=create_template_params, + ) + + stub.add_response( + "describe_template_definition", + service_response=describe_template_definition_response(), + expected_params={ + "AwsAccountId": account, + "TemplateId": "library-template", + "AliasName": "$LATEST", + }, + ) + + stub.add_response( + "describe_data_set", + service_response=describe_data_set_1_response(), + expected_params={ + "AwsAccountId": account, + "DataSetId": "e9e15c78-0193-4e4c-9a49-ed005569297d", + }, + ) + + stub.add_response( + "describe_data_set", + service_response=describe_data_set_2_response(), + expected_params={ + "AwsAccountId": account, + "DataSetId": "86eb4ca5-9552-4ba6-8b1b-7ef1b9b40f78", + }, + ) + + op = ExportAnalysisOperation( 
+ qs_client=qs_client, + analysis_id=analysis_id, + output_dir=output_dir, + aws_account_id=account, + ) op.execute() @@ -63,5 +105,12 @@ def test(self): template_file = os.path.join(templates_dir, "library.json") patron_events_file = os.path.join(data_sets_dir, "patron_events.json") circulation_events_file = os.path.join(data_sets_dir, "circulation_view.json") - for p in [assets_dir, data_sets_dir, templates_dir, template_file, patron_events_file, circulation_events_file]: + for p in [ + assets_dir, + data_sets_dir, + templates_dir, + template_file, + patron_events_file, + circulation_events_file, + ]: assert os.path.exists(p) diff --git a/tests/core/test_cli.py b/tests/core/test_cli.py index 94a2e6c..c42796a 100644 --- a/tests/core/test_cli.py +++ b/tests/core/test_cli.py @@ -2,9 +2,10 @@ from core.cli import export_analysis + class TestCli: def test_export_analysis_help(self): - runner = CliRunner() - result = runner.invoke(export_analysis, ['--help']) - assert result.exit_code == 0 - assert "Exports a template" in result.output + runner = CliRunner() + result = runner.invoke(export_analysis, ["--help"]) + assert result.exit_code == 0 + assert "Exports a template" in result.output From 5dbf298dcdca77459346d09cf802d7dc8db4d526 Mon Sep 17 00:00:00 2001 From: Daniel Bernstein Date: Wed, 6 Sep 2023 09:23:10 -0700 Subject: [PATCH 11/30] Add ci group back into the toml. --- .github/workflows/lint.yml | 2 +- .github/workflows/mypy.yml | 2 +- .github/workflows/test-build.yml | 2 +- poetry.lock | 511 ++++++++++++++++++++++++++++++- pyproject.toml | 7 + 5 files changed, 520 insertions(+), 4 deletions(-) diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index ce97af7..c06bb78 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -23,7 +23,7 @@ jobs: - name: Install Pre-commit run: | - poetry install + poetry install --only ci env: POETRY_VIRTUALENVS_CREATE: false diff --git a/.github/workflows/mypy.yml b/.github/workflows/mypy.yml index 3d7489a..beca934 100644 --- a/.github/workflows/mypy.yml +++ b/.github/workflows/mypy.yml @@ -26,7 +26,7 @@ jobs: sudo apt-get install --yes libxmlsec1-dev libxml2-dev - name: Install Python Packages 📦 - run: poetry install + run: poetry install --without ci - name: Run MyPy 🪄 run: poetry run mypy diff --git a/.github/workflows/test-build.yml b/.github/workflows/test-build.yml index fe8005e..cddb3fb 100644 --- a/.github/workflows/test-build.yml +++ b/.github/workflows/test-build.yml @@ -45,7 +45,7 @@ jobs: - name: Install Tox run: | - poetry install + poetry install --only ci env: POETRY_VIRTUALENVS_CREATE: false diff --git a/poetry.lock b/poetry.lock index 26feec1..f5358ae 100644 --- a/poetry.lock +++ b/poetry.lock @@ -38,6 +38,134 @@ urllib3 = ">=1.25.4,<1.27" [package.extras] crt = ["awscrt (==0.16.26)"] +[[package]] +name = "cachetools" +version = "5.3.1" +description = "Extensible memoizing collections and decorators" +optional = false +python-versions = ">=3.7" +files = [ + {file = "cachetools-5.3.1-py3-none-any.whl", hash = "sha256:95ef631eeaea14ba2e36f06437f36463aac3a096799e876ee55e5cdccb102590"}, + {file = "cachetools-5.3.1.tar.gz", hash = "sha256:dce83f2d9b4e1f732a8cd44af8e8fab2dbe46201467fc98b3ef8f269092bf62b"}, +] + +[[package]] +name = "certifi" +version = "2023.7.22" +description = "Python package for providing Mozilla's CA Bundle." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2023.7.22-py3-none-any.whl", hash = "sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9"}, + {file = "certifi-2023.7.22.tar.gz", hash = "sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082"}, +] + +[[package]] +name = "cfgv" +version = "3.4.0" +description = "Validate configuration and produce human readable error messages." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"}, + {file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"}, +] + +[[package]] +name = "chardet" +version = "5.2.0" +description = "Universal encoding detector for Python 3" +optional = false +python-versions = ">=3.7" +files = [ + {file = "chardet-5.2.0-py3-none-any.whl", hash = "sha256:e1cf59446890a00105fe7b7912492ea04b6e6f06d4b742b2c788469e34c82970"}, + {file = "chardet-5.2.0.tar.gz", hash = "sha256:1b3b6ff479a8c414bc3fa2c0852995695c4a026dcd6d0633b2dd092ca39c1cf7"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.2.0" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.2.0.tar.gz", hash = "sha256:3bb3d25a8e6c0aedd251753a79ae98a093c7e7b471faa3aa9a93a81431987ace"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b87549028f680ca955556e3bd57013ab47474c3124dc069faa0b6545b6c9710"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7c70087bfee18a42b4040bb9ec1ca15a08242cf5867c58726530bdf3945672ed"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a103b3a7069b62f5d4890ae1b8f0597618f628b286b03d4bc9195230b154bfa9"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94aea8eff76ee6d1cdacb07dd2123a68283cb5569e0250feab1240058f53b623"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:db901e2ac34c931d73054d9797383d0f8009991e723dab15109740a63e7f902a"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b0dac0ff919ba34d4df1b6131f59ce95b08b9065233446be7e459f95554c0dc8"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:193cbc708ea3aca45e7221ae58f0fd63f933753a9bfb498a3b474878f12caaad"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09393e1b2a9461950b1c9a45d5fd251dc7c6f228acab64da1c9c0165d9c7765c"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:baacc6aee0b2ef6f3d308e197b5d7a81c0e70b06beae1f1fcacffdbd124fe0e3"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bf420121d4c8dce6b889f0e8e4ec0ca34b7f40186203f06a946fa0276ba54029"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c04a46716adde8d927adb9457bbe39cf473e1e2c2f5d0a16ceb837e5d841ad4f"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = 
"sha256:aaf63899c94de41fe3cf934601b0f7ccb6b428c6e4eeb80da72c58eab077b19a"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62e51710986674142526ab9f78663ca2b0726066ae26b78b22e0f5e571238dd"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-win32.whl", hash = "sha256:04e57ab9fbf9607b77f7d057974694b4f6b142da9ed4a199859d9d4d5c63fe96"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:48021783bdf96e3d6de03a6e39a1171ed5bd7e8bb93fc84cc649d11490f87cea"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4957669ef390f0e6719db3613ab3a7631e68424604a7b448f079bee145da6e09"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:46fb8c61d794b78ec7134a715a3e564aafc8f6b5e338417cb19fe9f57a5a9bf2"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f779d3ad205f108d14e99bb3859aa7dd8e9c68874617c72354d7ecaec2a054ac"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f25c229a6ba38a35ae6e25ca1264621cc25d4d38dca2942a7fce0b67a4efe918"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2efb1bd13885392adfda4614c33d3b68dee4921fd0ac1d3988f8cbb7d589e72a"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f30b48dd7fa1474554b0b0f3fdfdd4c13b5c737a3c6284d3cdc424ec0ffff3a"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:246de67b99b6851627d945db38147d1b209a899311b1305dd84916f2b88526c6"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bd9b3b31adcb054116447ea22caa61a285d92e94d710aa5ec97992ff5eb7cf3"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8c2f5e83493748286002f9369f3e6607c565a6a90425a3a1fef5ae32a36d749d"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3170c9399da12c9dc66366e9d14da8bf7147e1e9d9ea566067bbce7bb74bd9c2"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7a4826ad2bd6b07ca615c74ab91f32f6c96d08f6fcc3902ceeedaec8cdc3bcd6"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:3b1613dd5aee995ec6d4c69f00378bbd07614702a315a2cf6c1d21461fe17c23"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9e608aafdb55eb9f255034709e20d5a83b6d60c054df0802fa9c9883d0a937aa"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-win32.whl", hash = "sha256:f2a1d0fd4242bd8643ce6f98927cf9c04540af6efa92323e9d3124f57727bfc1"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:681eb3d7e02e3c3655d1b16059fbfb605ac464c834a0c629048a30fad2b27489"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c57921cda3a80d0f2b8aec7e25c8aa14479ea92b5b51b6876d975d925a2ea346"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41b25eaa7d15909cf3ac4c96088c1f266a9a93ec44f87f1d13d4a0e86c81b982"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f058f6963fd82eb143c692cecdc89e075fa0828db2e5b291070485390b2f1c9c"}, + {file = 
"charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7647ebdfb9682b7bb97e2a5e7cb6ae735b1c25008a70b906aecca294ee96cf4"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eef9df1eefada2c09a5e7a40991b9fc6ac6ef20b1372abd48d2794a316dc0449"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e03b8895a6990c9ab2cdcd0f2fe44088ca1c65ae592b8f795c3294af00a461c3"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ee4006268ed33370957f55bf2e6f4d263eaf4dc3cfc473d1d90baff6ed36ce4a"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c4983bf937209c57240cff65906b18bb35e64ae872da6a0db937d7b4af845dd7"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:3bb7fda7260735efe66d5107fb7e6af6a7c04c7fce9b2514e04b7a74b06bf5dd"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:72814c01533f51d68702802d74f77ea026b5ec52793c791e2da806a3844a46c3"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:70c610f6cbe4b9fce272c407dd9d07e33e6bf7b4aa1b7ffb6f6ded8e634e3592"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-win32.whl", hash = "sha256:a401b4598e5d3f4a9a811f3daf42ee2291790c7f9d74b18d75d6e21dda98a1a1"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:c0b21078a4b56965e2b12f247467b234734491897e99c1d51cee628da9786959"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:95eb302ff792e12aba9a8b8f8474ab229a83c103d74a750ec0bd1c1eea32e669"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1a100c6d595a7f316f1b6f01d20815d916e75ff98c27a01ae817439ea7726329"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6339d047dab2780cc6220f46306628e04d9750f02f983ddb37439ca47ced7149"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4b749b9cc6ee664a3300bb3a273c1ca8068c46be705b6c31cf5d276f8628a94"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a38856a971c602f98472050165cea2cdc97709240373041b69030be15047691f"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f87f746ee241d30d6ed93969de31e5ffd09a2961a051e60ae6bddde9ec3583aa"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89f1b185a01fe560bc8ae5f619e924407efca2191b56ce749ec84982fc59a32a"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e1c8a2f4c69e08e89632defbfabec2feb8a8d99edc9f89ce33c4b9e36ab63037"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2f4ac36d8e2b4cc1aa71df3dd84ff8efbe3bfb97ac41242fbcfc053c67434f46"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a386ebe437176aab38c041de1260cd3ea459c6ce5263594399880bbc398225b2"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:ccd16eb18a849fd8dcb23e23380e2f0a354e8daa0c984b8a732d9cfaba3a776d"}, + {file = 
"charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:e6a5bf2cba5ae1bb80b154ed68a3cfa2fa00fde979a7f50d6598d3e17d9ac20c"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:45de3f87179c1823e6d9e32156fb14c1927fcc9aba21433f088fdfb555b77c10"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-win32.whl", hash = "sha256:1000fba1057b92a65daec275aec30586c3de2401ccdcd41f8a5c1e2c87078706"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:8b2c760cfc7042b27ebdb4a43a4453bd829a5742503599144d54a032c5dc7e9e"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:855eafa5d5a2034b4621c74925d89c5efef61418570e5ef9b37717d9c796419c"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:203f0c8871d5a7987be20c72442488a0b8cfd0f43b7973771640fc593f56321f"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e857a2232ba53ae940d3456f7533ce6ca98b81917d47adc3c7fd55dad8fab858"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e86d77b090dbddbe78867a0275cb4df08ea195e660f1f7f13435a4649e954e5"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4fb39a81950ec280984b3a44f5bd12819953dc5fa3a7e6fa7a80db5ee853952"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dee8e57f052ef5353cf608e0b4c871aee320dd1b87d351c28764fc0ca55f9f4"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8700f06d0ce6f128de3ccdbc1acaea1ee264d2caa9ca05daaf492fde7c2a7200"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1920d4ff15ce893210c1f0c0e9d19bfbecb7983c76b33f046c13a8ffbd570252"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c1c76a1743432b4b60ab3358c937a3fe1341c828ae6194108a94c69028247f22"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f7560358a6811e52e9c4d142d497f1a6e10103d3a6881f18d04dbce3729c0e2c"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:c8063cf17b19661471ecbdb3df1c84f24ad2e389e326ccaf89e3fb2484d8dd7e"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:cd6dbe0238f7743d0efe563ab46294f54f9bc8f4b9bcf57c3c666cc5bc9d1299"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1249cbbf3d3b04902ff081ffbb33ce3377fa6e4c7356f759f3cd076cc138d020"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-win32.whl", hash = "sha256:6c409c0deba34f147f77efaa67b8e4bb83d2f11c8806405f76397ae5b8c0d1c9"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:7095f6fbfaa55defb6b733cfeb14efaae7a29f0b59d8cf213be4e7ca0b857b80"}, + {file = "charset_normalizer-3.2.0-py3-none-any.whl", hash = "sha256:8e098148dd37b4ce3baca71fb394c81dc5d9c7728c95df695d2dca218edf40e6"}, +] + [[package]] name = "click" version = "8.1.7" @@ -63,6 +191,52 @@ files = [ {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] +[[package]] +name = "distlib" +version = "0.3.7" +description = "Distribution utilities" +optional = false +python-versions = "*" +files = [ + {file = 
"distlib-0.3.7-py2.py3-none-any.whl", hash = "sha256:2e24928bc811348f0feb63014e97aaae3037f2cf48712d51ae61df7fd6075057"}, + {file = "distlib-0.3.7.tar.gz", hash = "sha256:9dafe54b34a028eafd95039d5e5d4851a13734540f1331060d31c9916e7147a8"}, +] + +[[package]] +name = "docker" +version = "6.1.3" +description = "A Python library for the Docker Engine API." +optional = false +python-versions = ">=3.7" +files = [ + {file = "docker-6.1.3-py3-none-any.whl", hash = "sha256:aecd2277b8bf8e506e484f6ab7aec39abe0038e29fa4a6d3ba86c3fe01844ed9"}, + {file = "docker-6.1.3.tar.gz", hash = "sha256:aa6d17830045ba5ef0168d5eaa34d37beeb113948c413affe1d5991fc11f9a20"}, +] + +[package.dependencies] +packaging = ">=14.0" +pywin32 = {version = ">=304", markers = "sys_platform == \"win32\""} +requests = ">=2.26.0" +urllib3 = ">=1.26.0" +websocket-client = ">=0.32.0" + +[package.extras] +ssh = ["paramiko (>=2.4.3)"] + +[[package]] +name = "dunamai" +version = "1.18.0" +description = "Dynamic version generation" +optional = false +python-versions = ">=3.5,<4.0" +files = [ + {file = "dunamai-1.18.0-py3-none-any.whl", hash = "sha256:f9284a9f4048f0b809d11539896e78bde94c05b091b966a04a44ab4c48df03ce"}, + {file = "dunamai-1.18.0.tar.gz", hash = "sha256:5200598561ea5ba956a6174c36e402e92206c6a6aa4a93a6c5cb8003ee1e0997"}, +] + +[package.dependencies] +packaging = ">=20.9" + [[package]] name = "exceptiongroup" version = "1.1.3" @@ -77,6 +251,49 @@ files = [ [package.extras] test = ["pytest (>=6)"] +[[package]] +name = "filelock" +version = "3.12.3" +description = "A platform independent file lock." +optional = false +python-versions = ">=3.8" +files = [ + {file = "filelock-3.12.3-py3-none-any.whl", hash = "sha256:f067e40ccc40f2b48395a80fcbd4728262fab54e232e090a4063ab804179efeb"}, + {file = "filelock-3.12.3.tar.gz", hash = "sha256:0ecc1dd2ec4672a10c8550a8182f1bd0c0a5088470ecd5a125e45f49472fac3d"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4.7.1", markers = "python_version < \"3.11\""} + +[package.extras] +docs = ["furo (>=2023.7.26)", "sphinx (>=7.1.2)", "sphinx-autodoc-typehints (>=1.24)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.3)", "diff-cover (>=7.7)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)", "pytest-timeout (>=2.1)"] + +[[package]] +name = "identify" +version = "2.5.27" +description = "File identification library for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "identify-2.5.27-py2.py3-none-any.whl", hash = "sha256:fdb527b2dfe24602809b2201e033c2a113d7bdf716db3ca8e3243f735dcecaba"}, + {file = "identify-2.5.27.tar.gz", hash = "sha256:287b75b04a0e22d727bc9a41f0d4f3c1bcada97490fa6eabb5b28f0e9097e733"}, +] + +[package.extras] +license = ["ukkonen"] + +[[package]] +name = "idna" +version = "3.4" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, + {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, +] + [[package]] name = "iniconfig" version = "2.0.0" @@ -99,6 +316,20 @@ files = [ {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, ] +[[package]] +name = "nodeenv" +version = "1.8.0" +description = "Node.js virtual environment builder" +optional = false +python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*" 
+files = [ + {file = "nodeenv-1.8.0-py2.py3-none-any.whl", hash = "sha256:df865724bb3c3adc86b3876fa209771517b0cfe596beff01a92700e0e8be4cec"}, + {file = "nodeenv-1.8.0.tar.gz", hash = "sha256:d51e0c37e64fbf47d017feac3145cdbb58836d7eee8c6f6d3b6880c5456227d2"}, +] + +[package.dependencies] +setuptools = "*" + [[package]] name = "packaging" version = "23.1" @@ -110,6 +341,21 @@ files = [ {file = "packaging-23.1.tar.gz", hash = "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f"}, ] +[[package]] +name = "platformdirs" +version = "3.10.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +optional = false +python-versions = ">=3.7" +files = [ + {file = "platformdirs-3.10.0-py3-none-any.whl", hash = "sha256:d7c24979f292f916dc9cbf8648319032f551ea8c49a4c9bf2fb556a02070ec1d"}, + {file = "platformdirs-3.10.0.tar.gz", hash = "sha256:b45696dab2d7cc691a3226759c0d3b00c47c8b6e293d96f6436f733303f77f6d"}, +] + +[package.extras] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"] + [[package]] name = "pluggy" version = "1.3.0" @@ -125,6 +371,43 @@ files = [ dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] +[[package]] +name = "pre-commit" +version = "3.4.0" +description = "A framework for managing and maintaining multi-language pre-commit hooks." +optional = false +python-versions = ">=3.8" +files = [ + {file = "pre_commit-3.4.0-py2.py3-none-any.whl", hash = "sha256:96d529a951f8b677f730a7212442027e8ba53f9b04d217c4c67dc56c393ad945"}, + {file = "pre_commit-3.4.0.tar.gz", hash = "sha256:6bbd5129a64cad4c0dfaeeb12cd8f7ea7e15b77028d985341478c8af3c759522"}, +] + +[package.dependencies] +cfgv = ">=2.0.0" +identify = ">=1.0.0" +nodeenv = ">=0.11.1" +pyyaml = ">=5.1" +virtualenv = ">=20.10.0" + +[[package]] +name = "pyproject-api" +version = "1.6.1" +description = "API to interact with the python pyproject.toml based projects" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyproject_api-1.6.1-py3-none-any.whl", hash = "sha256:4c0116d60476b0786c88692cf4e325a9814965e2469c5998b830bba16b183675"}, + {file = "pyproject_api-1.6.1.tar.gz", hash = "sha256:1817dc018adc0d1ff9ca1ed8c60e1623d5aaca40814b953af14a9cf9a5cae538"}, +] + +[package.dependencies] +packaging = ">=23.1" +tomli = {version = ">=2.0.1", markers = "python_version < \"3.11\""} + +[package.extras] +docs = ["furo (>=2023.8.19)", "sphinx (<7.2)", "sphinx-autodoc-typehints (>=1.24)"] +testing = ["covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)", "setuptools (>=68.1.2)", "wheel (>=0.41.2)"] + [[package]] name = "pytest" version = "7.4.1" @@ -161,6 +444,109 @@ files = [ [package.dependencies] six = ">=1.5" +[[package]] +name = "pywin32" +version = "306" +description = "Python for Window Extensions" +optional = false +python-versions = "*" +files = [ + {file = "pywin32-306-cp310-cp310-win32.whl", hash = "sha256:06d3420a5155ba65f0b72f2699b5bacf3109f36acbe8923765c22938a69dfc8d"}, + {file = "pywin32-306-cp310-cp310-win_amd64.whl", hash = "sha256:84f4471dbca1887ea3803d8848a1616429ac94a4a8d05f4bc9c5dcfd42ca99c8"}, + {file = "pywin32-306-cp311-cp311-win32.whl", hash = "sha256:e65028133d15b64d2ed8f06dd9fbc268352478d4f9289e69c190ecd6818b6407"}, + {file = "pywin32-306-cp311-cp311-win_amd64.whl", hash = 
"sha256:a7639f51c184c0272e93f244eb24dafca9b1855707d94c192d4a0b4c01e1100e"}, + {file = "pywin32-306-cp311-cp311-win_arm64.whl", hash = "sha256:70dba0c913d19f942a2db25217d9a1b726c278f483a919f1abfed79c9cf64d3a"}, + {file = "pywin32-306-cp312-cp312-win32.whl", hash = "sha256:383229d515657f4e3ed1343da8be101000562bf514591ff383ae940cad65458b"}, + {file = "pywin32-306-cp312-cp312-win_amd64.whl", hash = "sha256:37257794c1ad39ee9be652da0462dc2e394c8159dfd913a8a4e8eb6fd346da0e"}, + {file = "pywin32-306-cp312-cp312-win_arm64.whl", hash = "sha256:5821ec52f6d321aa59e2db7e0a35b997de60c201943557d108af9d4ae1ec7040"}, + {file = "pywin32-306-cp37-cp37m-win32.whl", hash = "sha256:1c73ea9a0d2283d889001998059f5eaaba3b6238f767c9cf2833b13e6a685f65"}, + {file = "pywin32-306-cp37-cp37m-win_amd64.whl", hash = "sha256:72c5f621542d7bdd4fdb716227be0dd3f8565c11b280be6315b06ace35487d36"}, + {file = "pywin32-306-cp38-cp38-win32.whl", hash = "sha256:e4c092e2589b5cf0d365849e73e02c391c1349958c5ac3e9d5ccb9a28e017b3a"}, + {file = "pywin32-306-cp38-cp38-win_amd64.whl", hash = "sha256:e8ac1ae3601bee6ca9f7cb4b5363bf1c0badb935ef243c4733ff9a393b1690c0"}, + {file = "pywin32-306-cp39-cp39-win32.whl", hash = "sha256:e25fd5b485b55ac9c057f67d94bc203f3f6595078d1fb3b458c9c28b7153a802"}, + {file = "pywin32-306-cp39-cp39-win_amd64.whl", hash = "sha256:39b61c15272833b5c329a2989999dcae836b1eed650252ab1b7bfbe1d59f30f4"}, +] + +[[package]] +name = "pyyaml" +version = "6.0.1" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file 
= "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] + +[[package]] +name = "requests" +version = "2.31.0" +description = "Python HTTP for Humans." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + [[package]] name = "s3transfer" version = "0.6.2" @@ -178,6 +564,22 @@ botocore = ">=1.12.36,<2.0a.0" [package.extras] crt = ["botocore[crt] (>=1.20.29,<2.0a.0)"] +[[package]] +name = "setuptools" +version = "68.1.2" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "setuptools-68.1.2-py3-none-any.whl", hash = "sha256:3d8083eed2d13afc9426f227b24fd1659489ec107c0e86cec2ffdde5c92e790b"}, + {file = "setuptools-68.1.2.tar.gz", hash = "sha256:3d4dfa6d95f1b101d695a6160a7626e15583af71a5f52176efa5d39a054d475d"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5,<=7.1.2)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + [[package]] name = "six" version = "1.16.0" @@ -200,6 +602,77 @@ files = [ {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, ] +[[package]] +name = "tox" +version = "4.11.1" +description = "tox is a generic virtualenv management and test command line tool" +optional = false +python-versions = ">=3.8" +files = [ + {file = "tox-4.11.1-py3-none-any.whl", hash = "sha256:da761b4a57ee2b92b5ce39f48ff723fc42d185bf2af508effb683214efa662ea"}, + {file = "tox-4.11.1.tar.gz", hash = "sha256:8a8cc94b7269f8e43dfc636eff2da4b33a199a4e575b5b086cc51aae24ac4262"}, +] + +[package.dependencies] +cachetools = ">=5.3.1" +chardet = ">=5.2" +colorama = ">=0.4.6" +filelock = ">=3.12.3" +packaging = ">=23.1" +platformdirs = ">=3.10" +pluggy = ">=1.3" +pyproject-api = ">=1.6.1" +tomli = {version = ">=2.0.1", markers = "python_version < \"3.11\""} +virtualenv = ">=20.24.3" + +[package.extras] +docs = ["furo (>=2023.8.19)", "sphinx (>=7.2.4)", "sphinx-argparse-cli (>=1.11.1)", "sphinx-autodoc-typehints (>=1.24)", "sphinx-copybutton (>=0.5.2)", "sphinx-inline-tabs (>=2023.4.21)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] +testing = ["build[virtualenv] (>=0.10)", "covdefaults (>=2.3)", "detect-test-pollution (>=1.1.1)", "devpi-process (>=1)", "diff-cover (>=7.7)", "distlib (>=0.3.7)", "flaky (>=3.7)", "hatch-vcs (>=0.3)", "hatchling 
(>=1.18)", "psutil (>=5.9.5)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)", "pytest-xdist (>=3.3.1)", "re-assert (>=1.1)", "time-machine (>=2.12)", "wheel (>=0.41.2)"] + +[[package]] +name = "tox-docker" +version = "4.1.0" +description = "Launch a docker instance around test runs" +optional = false +python-versions = "*" +files = [ + {file = "tox-docker-4.1.0.tar.gz", hash = "sha256:0317e692dc80f2197eaf9c905dcb8d1d1f9d5bf2686ecfd83c22a1da9d23fb24"}, + {file = "tox_docker-4.1.0-py2.py3-none-any.whl", hash = "sha256:444c72192a2443d2b4db5766545d4413ea683cc488523d770e2e216f15fa3086"}, +] + +[package.dependencies] +docker = ">=4.0,<7.0" +packaging = "*" +tox = ">=3.0.0,<5.0" + +[[package]] +name = "tox-gh-actions" +version = "3.1.3" +description = "Seamless integration of tox into GitHub Actions" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tox-gh-actions-3.1.3.tar.gz", hash = "sha256:ffd4151fe8b62c6f401a2fc5a01317835d7ab380923f6e0d063c300750308328"}, + {file = "tox_gh_actions-3.1.3-py2.py3-none-any.whl", hash = "sha256:5954766fe2ed0e284f3cdc87535dfdf68d0f803f1011b17ff8cf52ed3156e6c1"}, +] + +[package.dependencies] +tox = ">=4,<5" + +[package.extras] +testing = ["black", "devpi-process", "flake8 (>=6,<7)", "mypy", "pytest (>=7,<8)", "pytest-cov (>=3,<4)", "pytest-mock (>=3,<4)", "pytest-randomly (>=3)"] + +[[package]] +name = "typing-extensions" +version = "4.7.1" +description = "Backported and Experimental Type Hints for Python 3.7+" +optional = false +python-versions = ">=3.7" +files = [ + {file = "typing_extensions-4.7.1-py3-none-any.whl", hash = "sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36"}, + {file = "typing_extensions-4.7.1.tar.gz", hash = "sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2"}, +] + [[package]] name = "urllib3" version = "1.26.16" @@ -216,7 +689,43 @@ brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] +[[package]] +name = "virtualenv" +version = "20.24.4" +description = "Virtual Python Environment builder" +optional = false +python-versions = ">=3.7" +files = [ + {file = "virtualenv-20.24.4-py3-none-any.whl", hash = "sha256:29c70bb9b88510f6414ac3e55c8b413a1f96239b6b789ca123437d5e892190cb"}, + {file = "virtualenv-20.24.4.tar.gz", hash = "sha256:772b05bfda7ed3b8ecd16021ca9716273ad9f4467c801f27e83ac73430246dca"}, +] + +[package.dependencies] +distlib = ">=0.3.7,<1" +filelock = ">=3.12.2,<4" +platformdirs = ">=3.9.1,<4" + +[package.extras] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] +test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] + +[[package]] +name = "websocket-client" +version = "1.6.2" +description = "WebSocket client for Python with low level API options" +optional = false +python-versions = ">=3.8" +files = [ + {file = "websocket-client-1.6.2.tar.gz", hash = "sha256:53e95c826bf800c4c465f50093a8c4ff091c7327023b10bfaff40cf1ef170eaa"}, + {file = "websocket_client-1.6.2-py3-none-any.whl", hash = 
"sha256:ce54f419dfae71f4bdba69ebe65bf7f0a93fe71bc009ad3a010aacc3eebad537"}, +] + +[package.extras] +docs = ["Sphinx (>=6.0)", "sphinx-rtd-theme (>=1.1.0)"] +optional = ["python-socks", "wsaccel"] +test = ["websockets"] + [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "94ac11370453c6009b8fd67ad18834668e6d8ae563542d02296bf0d7e583140a" +content-hash = "77ed418b75562d548e1ffa3b2831f12cd32ca5390c7dbbfa6d4023355823d377" diff --git a/pyproject.toml b/pyproject.toml index 0887c8c..08dcdb6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -7,6 +7,13 @@ readme = "README.md" packages = [{include = "core"}] repository = "https://github.com/dbernstein/palace-quicksight" +[tool.poetry.group.ci.dependencies] +dunamai = "^1.16" +pre-commit = "^3.2" +tox = "^4.4" +tox-docker = "^4.1" +tox-gh-actions = "^3.0" + [tool.poetry.dependencies] python = "^3.10" boto3 = "^1.18" From 834fe19e50dca65ed96c7ee53db86be77e9ffc1d Mon Sep 17 00:00:00 2001 From: Daniel Bernstein Date: Wed, 6 Sep 2023 09:29:20 -0700 Subject: [PATCH 12/30] Add pymarkdown config file and fix README.md. --- .pymarkdown.config.json | 21 +++++++++++++++++++++ README.md | 2 +- 2 files changed, 22 insertions(+), 1 deletion(-) create mode 100644 .pymarkdown.config.json diff --git a/.pymarkdown.config.json b/.pymarkdown.config.json new file mode 100644 index 0000000..06d73c9 --- /dev/null +++ b/.pymarkdown.config.json @@ -0,0 +1,21 @@ +{ + "plugins" : { + "line-length" : { + "line_length" : 120, + "heading_line_length": 120, + "code_block_line_length": 120 + }, + "no-duplicate-heading" : { + "allow_different_nesting": true + }, + "ul-indent" : { + "indent" : 4 + }, + "blanks-around-fences" : { + "list_items": false + }, + "first-line-heading" : { + "enabled": false + } + } +} diff --git a/README.md b/README.md index 4b3ab09..36669c3 100644 --- a/README.md +++ b/README.md @@ -5,7 +5,7 @@ Exported resources can be found [here](https://github.com/ThePalaceProject/palac ## Usage -``` +```shell ./bin/palace-quicksight --help Usage: palace-quicksight [OPTIONS] COMMAND [ARGS]... From 76167a5fa61bccafd96385254b3a46009ec6c3d7 Mon Sep 17 00:00:00 2001 From: Daniel Bernstein Date: Wed, 6 Sep 2023 09:29:38 -0700 Subject: [PATCH 13/30] Formatting fix. --- core/operation/export_analysis_operation.py | 1 - 1 file changed, 1 deletion(-) diff --git a/core/operation/export_analysis_operation.py b/core/operation/export_analysis_operation.py index 824ddc2..6e8efb9 100644 --- a/core/operation/export_analysis_operation.py +++ b/core/operation/export_analysis_operation.py @@ -18,7 +18,6 @@ def __init__(self, analysis_id: str, output_dir: str, *args, **kwargs): super().__init__(*args, **kwargs) def execute(self): - os.makedirs(self._resolve_path(self._output_dir, TEMPLATE_DIR), exist_ok=True) os.makedirs(self._resolve_path(self._output_dir, DATA_SET_DIR), exist_ok=True) From cdb0dca8741120a53106704e37a937225cdbf763 Mon Sep 17 00:00:00 2001 From: Daniel Bernstein Date: Wed, 6 Sep 2023 09:36:00 -0700 Subject: [PATCH 14/30] Disable isort in pre commit config. 
--- .pre-commit-config.yaml | 12 ++++++------ core/cli.py | 5 +++-- core/operation/export_analysis_operation.py | 3 +-- core/operation/import_from_json_operation.py | 3 +-- .../core/operation/test_export_analysis_operation.py | 10 +++++++--- 5 files changed, 18 insertions(+), 15 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 89e9708..eb4c646 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -39,12 +39,12 @@ repos: - id: black name: Run black - - repo: https://github.com/PyCQA/isort - rev: 5.12.0 - hooks: - - id: isort - name: Run isort - +# - repo: https://github.com/PyCQA/isort +# rev: 5.12.0 +# hooks: +# - id: isort +# name: Run isort +# - repo: https://github.com/sirosen/check-jsonschema rev: 0.22.0 hooks: diff --git a/core/cli.py b/core/cli.py index f578935..ff78cdf 100644 --- a/core/cli.py +++ b/core/cli.py @@ -5,8 +5,9 @@ from core.operation.export_analysis_operation import ExportAnalysisOperation from core.operation.import_from_json_operation import ImportFromJsonOperation -from core.operation.publish_dashboard_from_template import \ - PublishDashboardFromTemplateOperation +from core.operation.publish_dashboard_from_template import ( + PublishDashboardFromTemplateOperation, +) log = logging.getLogger("core.cli") diff --git a/core/operation/export_analysis_operation.py b/core/operation/export_analysis_operation.py index 6e8efb9..ab3bde3 100644 --- a/core/operation/export_analysis_operation.py +++ b/core/operation/export_analysis_operation.py @@ -2,8 +2,7 @@ import os from typing import List -from core.operation.baseoperation import (DATA_SET_DIR, TEMPLATE_DIR, - BaseOperation) +from core.operation.baseoperation import DATA_SET_DIR, TEMPLATE_DIR, BaseOperation from core.util import recursively_replace_value, retry diff --git a/core/operation/import_from_json_operation.py b/core/operation/import_from_json_operation.py index 3548a19..2a2c597 100644 --- a/core/operation/import_from_json_operation.py +++ b/core/operation/import_from_json_operation.py @@ -1,7 +1,6 @@ import json -from core.operation.baseoperation import (DATA_SET_DIR, TEMPLATE_DIR, - BaseOperation) +from core.operation.baseoperation import DATA_SET_DIR, TEMPLATE_DIR, BaseOperation from core.util import recursively_replace_value diff --git a/tests/core/operation/test_export_analysis_operation.py b/tests/core/operation/test_export_analysis_operation.py index 48cc11b..5a3c275 100644 --- a/tests/core/operation/test_export_analysis_operation.py +++ b/tests/core/operation/test_export_analysis_operation.py @@ -5,9 +5,13 @@ from core.operation.export_analysis_operation import ExportAnalysisOperation from tests.core.operation.analysis_test_responses import ( - create_template_response, describe_data_set_1_response, - describe_data_set_2_response, describe_template_definition_response, - get_analysis_definition_response, get_analysis_description_response) + create_template_response, + describe_data_set_1_response, + describe_data_set_2_response, + describe_template_definition_response, + get_analysis_definition_response, + get_analysis_description_response, +) class TestExportAnalysisOperation: From 4a6a6ffa24b7038d02ae6ea59edb0e6b97496185 Mon Sep 17 00:00:00 2001 From: Daniel Bernstein Date: Wed, 6 Sep 2023 09:40:50 -0700 Subject: [PATCH 15/30] Remove python 3.8 from tox config --- tox.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index 9f316b1..b1732ea 100644 --- a/tox.ini +++ b/tox.ini @@ -1,5 +1,5 @@ [tox] -envlist = 
py{38,39,310,311}-core +envlist = py{39,310,311}-core skipsdist = true [testenv] From f9e33c7651e3de35e0c76583f1079021bdbbdfea Mon Sep 17 00:00:00 2001 From: Daniel Bernstein Date: Wed, 6 Sep 2023 09:47:40 -0700 Subject: [PATCH 16/30] Fix ci test issue --- tests/core/operation/test_export_analysis_operation.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/tests/core/operation/test_export_analysis_operation.py b/tests/core/operation/test_export_analysis_operation.py index 5a3c275..199d733 100644 --- a/tests/core/operation/test_export_analysis_operation.py +++ b/tests/core/operation/test_export_analysis_operation.py @@ -1,6 +1,7 @@ import os import botocore +from botocore.config import Config from botocore.stub import Stubber from core.operation.export_analysis_operation import ExportAnalysisOperation @@ -20,7 +21,11 @@ def test(self): output_dir = "/tmp/test-output" account = "012345678910" - qs_client = botocore.session.get_session().create_client("quicksight") + boto_config = Config( + region_name='us-east-1', + ) + + qs_client = botocore.session.get_session().create_client("quicksight", config=boto_config) with Stubber(qs_client) as stub: analysis_description_params = { "AwsAccountId": account, From 32798841db96c95f68c1913eb1002a81d824f5dc Mon Sep 17 00:00:00 2001 From: Daniel Bernstein Date: Wed, 6 Sep 2023 09:53:17 -0700 Subject: [PATCH 17/30] Set minimum python version to 3.9 --- .github/workflows/test-build.yml | 2 +- poetry.lock | 4 ++-- pyproject.toml | 2 +- tox.ini | 1 - 4 files changed, 4 insertions(+), 5 deletions(-) diff --git a/.github/workflows/test-build.yml b/.github/workflows/test-build.yml index cddb3fb..0e79a63 100644 --- a/.github/workflows/test-build.yml +++ b/.github/workflows/test-build.yml @@ -15,7 +15,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ["3.8", "3.9", "3.10", "3.11"] + python-version: ["3.9", "3.10", "3.11"] module: [Core] # We want to run on external PRs, but not on our own internal PRs as they'll be run diff --git a/poetry.lock b/poetry.lock index f5358ae..7c205e0 100644 --- a/poetry.lock +++ b/poetry.lock @@ -727,5 +727,5 @@ test = ["websockets"] [metadata] lock-version = "2.0" -python-versions = "^3.10" -content-hash = "77ed418b75562d548e1ffa3b2831f12cd32ca5390c7dbbfa6d4023355823d377" +python-versions = "^3.9" +content-hash = "19b3c796b95ba91307a777f997262bffef23f4484ddc3bb812ecf15a9631346e" diff --git a/pyproject.toml b/pyproject.toml index 08dcdb6..4f4cf78 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -15,7 +15,7 @@ tox-docker = "^4.1" tox-gh-actions = "^3.0" [tool.poetry.dependencies] -python = "^3.10" +python = "^3.9" boto3 = "^1.18" botocore = "^1.21" click = "^8.1.3" diff --git a/tox.ini b/tox.ini index b1732ea..c49b2c1 100644 --- a/tox.ini +++ b/tox.ini @@ -15,7 +15,6 @@ allowlist_externals = [gh-actions] python = - 3.8: py38 3.9: py39 3.10: py310 3.11: py311 From 6ae3e84dd640785491bc63a8bb5f78560703f749 Mon Sep 17 00:00:00 2001 From: Daniel Bernstein Date: Wed, 6 Sep 2023 09:58:23 -0700 Subject: [PATCH 18/30] Fix formatting. 
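
The hunk below is black's rendering of the stubbed-client setup added in the previous
commit: quotes normalized to double quotes and the create_client call wrapped at
black's default 88-column limit. For reference, a minimal sketch of the full pattern
these tests rely on, with hypothetical account and analysis IDs. The explicit region
matters: even though Stubber never touches the network, botocore raises NoRegionError
at create_client() time on machines, such as CI runners, that have no default region
configured.

```python
import botocore.session
from botocore.config import Config
from botocore.stub import Stubber

# A region must be supplied even for a fully stubbed client; endpoint
# resolution happens at client creation, before Stubber intercepts anything.
qs_client = botocore.session.get_session().create_client(
    "quicksight", config=Config(region_name="us-east-1")
)

with Stubber(qs_client) as stub:
    # Queue one canned response along with the exact request parameters
    # we expect; Stubber raises if the actual call's parameters differ.
    stub.add_response(
        "describe_analysis",
        {"Status": 200},
        {"AwsAccountId": "012345678910", "AnalysisId": "example-analysis"},
    )
    response = qs_client.describe_analysis(
        AwsAccountId="012345678910", AnalysisId="example-analysis"
    )
    stub.assert_no_pending_responses()
```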
--- tests/core/operation/test_export_analysis_operation.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/tests/core/operation/test_export_analysis_operation.py b/tests/core/operation/test_export_analysis_operation.py index 199d733..e2278b0 100644 --- a/tests/core/operation/test_export_analysis_operation.py +++ b/tests/core/operation/test_export_analysis_operation.py @@ -22,10 +22,12 @@ def test(self): account = "012345678910" boto_config = Config( - region_name='us-east-1', + region_name="us-east-1", ) - qs_client = botocore.session.get_session().create_client("quicksight", config=boto_config) + qs_client = botocore.session.get_session().create_client( + "quicksight", config=boto_config + ) with Stubber(qs_client) as stub: analysis_description_params = { "AwsAccountId": account, From ffd9f9fd20108c3191b69d28de9eeef48bbbcda9 Mon Sep 17 00:00:00 2001 From: Daniel Bernstein Date: Wed, 6 Sep 2023 10:50:34 -0700 Subject: [PATCH 19/30] Fix mypy issues; re-enable isort plugin. --- .pre-commit-config.yaml | 12 +- core/operation/baseoperation.py | 4 +- .../publish_dashboard_from_template.py | 4 +- core/util.py | 2 + poetry.lock | 589 +++++++++++++++++- pyproject.toml | 35 +- tests/__init__.py | 0 tests/core/__init__.py | 0 8 files changed, 634 insertions(+), 12 deletions(-) create mode 100644 tests/__init__.py create mode 100644 tests/core/__init__.py diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index eb4c646..89e9708 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -39,12 +39,12 @@ repos: - id: black name: Run black -# - repo: https://github.com/PyCQA/isort -# rev: 5.12.0 -# hooks: -# - id: isort -# name: Run isort -# + - repo: https://github.com/PyCQA/isort + rev: 5.12.0 + hooks: + - id: isort + name: Run isort + - repo: https://github.com/sirosen/check-jsonschema rev: 0.22.0 hooks: diff --git a/core/operation/baseoperation.py b/core/operation/baseoperation.py index 02272c4..7078666 100644 --- a/core/operation/baseoperation.py +++ b/core/operation/baseoperation.py @@ -14,7 +14,7 @@ class BaseOperation: A base class for AWS based operations. """ - def __init__(self, qs_client: object, aws_account_id: str): + def __init__(self, qs_client, aws_account_id: str): self._aws_account_id = aws_account_id self._qs_client = qs_client self._log = logging.getLogger(self.__class__.__name__) @@ -23,7 +23,7 @@ def __init__(self, qs_client: object, aws_account_id: str): def execute(self): pass - def _create_or_update_template(self, template_data: dict) -> [str, str]: + def _create_or_update_template(self, template_data: dict) -> tuple[str, str, str]: """ Creates new or updates existing template. :param template_data: diff --git a/core/operation/publish_dashboard_from_template.py b/core/operation/publish_dashboard_from_template.py index 9ea71c7..ed4f1f8 100644 --- a/core/operation/publish_dashboard_from_template.py +++ b/core/operation/publish_dashboard_from_template.py @@ -39,7 +39,7 @@ def execute(self): # extract the data source placeholders dashboard_id = self._template_id - parameters = { + parameters: dict = { "AwsAccountId": self._aws_account_id, "Name": dashboard_id, "DashboardId": dashboard_id, @@ -107,7 +107,7 @@ def execute(self): response = self._qs_client.update_dashboard_permissions(**permissions_params) - def _create_or_update_dashboard(self, dashboard_params: dict) -> [str, str]: + def _create_or_update_dashboard(self, dashboard_params: dict) -> tuple[str, str]: """ Creates new or updates existing template. 
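         (Despite the wording above, the resource created or updated here is a dashboard, not a template.)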
:param dashboard_params: diff --git a/core/util.py b/core/util.py index 122c2b5..e316d13 100644 --- a/core/util.py +++ b/core/util.py @@ -14,6 +14,8 @@ def retry(func) -> bool: sleep(1) continue + return True + def recursively_replace_value(mydict: dict, key: str, val: str): """ diff --git a/poetry.lock b/poetry.lock index 7c205e0..4ea9e3c 100644 --- a/poetry.lock +++ b/poetry.lock @@ -19,6 +19,382 @@ s3transfer = ">=0.6.0,<0.7.0" [package.extras] crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] +[[package]] +name = "boto3-stubs" +version = "1.28.41" +description = "Type annotations for boto3 1.28.41 generated with mypy-boto3-builder 7.18.2" +optional = false +python-versions = ">=3.7" +files = [ + {file = "boto3-stubs-1.28.41.tar.gz", hash = "sha256:c2023e441cd4017f914d0e6531536bff0869a72b0d83eceeae830534731dd932"}, + {file = "boto3_stubs-1.28.41-py3-none-any.whl", hash = "sha256:e7be0734bbca33ec18e04a59a98719f7b5816adba13e6dd1ae3c2d0fa5b05643"}, +] + +[package.dependencies] +botocore-stubs = "*" +types-s3transfer = "*" +typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} + +[package.extras] +accessanalyzer = ["mypy-boto3-accessanalyzer (>=1.28.0,<1.29.0)"] +account = ["mypy-boto3-account (>=1.28.0,<1.29.0)"] +acm = ["mypy-boto3-acm (>=1.28.0,<1.29.0)"] +acm-pca = ["mypy-boto3-acm-pca (>=1.28.0,<1.29.0)"] +alexaforbusiness = ["mypy-boto3-alexaforbusiness (>=1.28.0,<1.29.0)"] +all = ["mypy-boto3-accessanalyzer (>=1.28.0,<1.29.0)", "mypy-boto3-account (>=1.28.0,<1.29.0)", "mypy-boto3-acm (>=1.28.0,<1.29.0)", "mypy-boto3-acm-pca (>=1.28.0,<1.29.0)", "mypy-boto3-alexaforbusiness (>=1.28.0,<1.29.0)", "mypy-boto3-amp (>=1.28.0,<1.29.0)", "mypy-boto3-amplify (>=1.28.0,<1.29.0)", "mypy-boto3-amplifybackend (>=1.28.0,<1.29.0)", "mypy-boto3-amplifyuibuilder (>=1.28.0,<1.29.0)", "mypy-boto3-apigateway (>=1.28.0,<1.29.0)", "mypy-boto3-apigatewaymanagementapi (>=1.28.0,<1.29.0)", "mypy-boto3-apigatewayv2 (>=1.28.0,<1.29.0)", "mypy-boto3-appconfig (>=1.28.0,<1.29.0)", "mypy-boto3-appconfigdata (>=1.28.0,<1.29.0)", "mypy-boto3-appfabric (>=1.28.0,<1.29.0)", "mypy-boto3-appflow (>=1.28.0,<1.29.0)", "mypy-boto3-appintegrations (>=1.28.0,<1.29.0)", "mypy-boto3-application-autoscaling (>=1.28.0,<1.29.0)", "mypy-boto3-application-insights (>=1.28.0,<1.29.0)", "mypy-boto3-applicationcostprofiler (>=1.28.0,<1.29.0)", "mypy-boto3-appmesh (>=1.28.0,<1.29.0)", "mypy-boto3-apprunner (>=1.28.0,<1.29.0)", "mypy-boto3-appstream (>=1.28.0,<1.29.0)", "mypy-boto3-appsync (>=1.28.0,<1.29.0)", "mypy-boto3-arc-zonal-shift (>=1.28.0,<1.29.0)", "mypy-boto3-athena (>=1.28.0,<1.29.0)", "mypy-boto3-auditmanager (>=1.28.0,<1.29.0)", "mypy-boto3-autoscaling (>=1.28.0,<1.29.0)", "mypy-boto3-autoscaling-plans (>=1.28.0,<1.29.0)", "mypy-boto3-backup (>=1.28.0,<1.29.0)", "mypy-boto3-backup-gateway (>=1.28.0,<1.29.0)", "mypy-boto3-backupstorage (>=1.28.0,<1.29.0)", "mypy-boto3-batch (>=1.28.0,<1.29.0)", "mypy-boto3-billingconductor (>=1.28.0,<1.29.0)", "mypy-boto3-braket (>=1.28.0,<1.29.0)", "mypy-boto3-budgets (>=1.28.0,<1.29.0)", "mypy-boto3-ce (>=1.28.0,<1.29.0)", "mypy-boto3-chime (>=1.28.0,<1.29.0)", "mypy-boto3-chime-sdk-identity (>=1.28.0,<1.29.0)", "mypy-boto3-chime-sdk-media-pipelines (>=1.28.0,<1.29.0)", "mypy-boto3-chime-sdk-meetings (>=1.28.0,<1.29.0)", "mypy-boto3-chime-sdk-messaging (>=1.28.0,<1.29.0)", "mypy-boto3-chime-sdk-voice (>=1.28.0,<1.29.0)", "mypy-boto3-cleanrooms (>=1.28.0,<1.29.0)", "mypy-boto3-cloud9 (>=1.28.0,<1.29.0)", "mypy-boto3-cloudcontrol (>=1.28.0,<1.29.0)", 
"mypy-boto3-clouddirectory (>=1.28.0,<1.29.0)", "mypy-boto3-cloudformation (>=1.28.0,<1.29.0)", "mypy-boto3-cloudfront (>=1.28.0,<1.29.0)", "mypy-boto3-cloudhsm (>=1.28.0,<1.29.0)", "mypy-boto3-cloudhsmv2 (>=1.28.0,<1.29.0)", "mypy-boto3-cloudsearch (>=1.28.0,<1.29.0)", "mypy-boto3-cloudsearchdomain (>=1.28.0,<1.29.0)", "mypy-boto3-cloudtrail (>=1.28.0,<1.29.0)", "mypy-boto3-cloudtrail-data (>=1.28.0,<1.29.0)", "mypy-boto3-cloudwatch (>=1.28.0,<1.29.0)", "mypy-boto3-codeartifact (>=1.28.0,<1.29.0)", "mypy-boto3-codebuild (>=1.28.0,<1.29.0)", "mypy-boto3-codecatalyst (>=1.28.0,<1.29.0)", "mypy-boto3-codecommit (>=1.28.0,<1.29.0)", "mypy-boto3-codedeploy (>=1.28.0,<1.29.0)", "mypy-boto3-codeguru-reviewer (>=1.28.0,<1.29.0)", "mypy-boto3-codeguru-security (>=1.28.0,<1.29.0)", "mypy-boto3-codeguruprofiler (>=1.28.0,<1.29.0)", "mypy-boto3-codepipeline (>=1.28.0,<1.29.0)", "mypy-boto3-codestar (>=1.28.0,<1.29.0)", "mypy-boto3-codestar-connections (>=1.28.0,<1.29.0)", "mypy-boto3-codestar-notifications (>=1.28.0,<1.29.0)", "mypy-boto3-cognito-identity (>=1.28.0,<1.29.0)", "mypy-boto3-cognito-idp (>=1.28.0,<1.29.0)", "mypy-boto3-cognito-sync (>=1.28.0,<1.29.0)", "mypy-boto3-comprehend (>=1.28.0,<1.29.0)", "mypy-boto3-comprehendmedical (>=1.28.0,<1.29.0)", "mypy-boto3-compute-optimizer (>=1.28.0,<1.29.0)", "mypy-boto3-config (>=1.28.0,<1.29.0)", "mypy-boto3-connect (>=1.28.0,<1.29.0)", "mypy-boto3-connect-contact-lens (>=1.28.0,<1.29.0)", "mypy-boto3-connectcampaigns (>=1.28.0,<1.29.0)", "mypy-boto3-connectcases (>=1.28.0,<1.29.0)", "mypy-boto3-connectparticipant (>=1.28.0,<1.29.0)", "mypy-boto3-controltower (>=1.28.0,<1.29.0)", "mypy-boto3-cur (>=1.28.0,<1.29.0)", "mypy-boto3-customer-profiles (>=1.28.0,<1.29.0)", "mypy-boto3-databrew (>=1.28.0,<1.29.0)", "mypy-boto3-dataexchange (>=1.28.0,<1.29.0)", "mypy-boto3-datapipeline (>=1.28.0,<1.29.0)", "mypy-boto3-datasync (>=1.28.0,<1.29.0)", "mypy-boto3-dax (>=1.28.0,<1.29.0)", "mypy-boto3-detective (>=1.28.0,<1.29.0)", "mypy-boto3-devicefarm (>=1.28.0,<1.29.0)", "mypy-boto3-devops-guru (>=1.28.0,<1.29.0)", "mypy-boto3-directconnect (>=1.28.0,<1.29.0)", "mypy-boto3-discovery (>=1.28.0,<1.29.0)", "mypy-boto3-dlm (>=1.28.0,<1.29.0)", "mypy-boto3-dms (>=1.28.0,<1.29.0)", "mypy-boto3-docdb (>=1.28.0,<1.29.0)", "mypy-boto3-docdb-elastic (>=1.28.0,<1.29.0)", "mypy-boto3-drs (>=1.28.0,<1.29.0)", "mypy-boto3-ds (>=1.28.0,<1.29.0)", "mypy-boto3-dynamodb (>=1.28.0,<1.29.0)", "mypy-boto3-dynamodbstreams (>=1.28.0,<1.29.0)", "mypy-boto3-ebs (>=1.28.0,<1.29.0)", "mypy-boto3-ec2 (>=1.28.0,<1.29.0)", "mypy-boto3-ec2-instance-connect (>=1.28.0,<1.29.0)", "mypy-boto3-ecr (>=1.28.0,<1.29.0)", "mypy-boto3-ecr-public (>=1.28.0,<1.29.0)", "mypy-boto3-ecs (>=1.28.0,<1.29.0)", "mypy-boto3-efs (>=1.28.0,<1.29.0)", "mypy-boto3-eks (>=1.28.0,<1.29.0)", "mypy-boto3-elastic-inference (>=1.28.0,<1.29.0)", "mypy-boto3-elasticache (>=1.28.0,<1.29.0)", "mypy-boto3-elasticbeanstalk (>=1.28.0,<1.29.0)", "mypy-boto3-elastictranscoder (>=1.28.0,<1.29.0)", "mypy-boto3-elb (>=1.28.0,<1.29.0)", "mypy-boto3-elbv2 (>=1.28.0,<1.29.0)", "mypy-boto3-emr (>=1.28.0,<1.29.0)", "mypy-boto3-emr-containers (>=1.28.0,<1.29.0)", "mypy-boto3-emr-serverless (>=1.28.0,<1.29.0)", "mypy-boto3-entityresolution (>=1.28.0,<1.29.0)", "mypy-boto3-es (>=1.28.0,<1.29.0)", "mypy-boto3-events (>=1.28.0,<1.29.0)", "mypy-boto3-evidently (>=1.28.0,<1.29.0)", "mypy-boto3-finspace (>=1.28.0,<1.29.0)", "mypy-boto3-finspace-data (>=1.28.0,<1.29.0)", "mypy-boto3-firehose (>=1.28.0,<1.29.0)", "mypy-boto3-fis 
(>=1.28.0,<1.29.0)", "mypy-boto3-fms (>=1.28.0,<1.29.0)", "mypy-boto3-forecast (>=1.28.0,<1.29.0)", "mypy-boto3-forecastquery (>=1.28.0,<1.29.0)", "mypy-boto3-frauddetector (>=1.28.0,<1.29.0)", "mypy-boto3-fsx (>=1.28.0,<1.29.0)", "mypy-boto3-gamelift (>=1.28.0,<1.29.0)", "mypy-boto3-gamesparks (>=1.28.0,<1.29.0)", "mypy-boto3-glacier (>=1.28.0,<1.29.0)", "mypy-boto3-globalaccelerator (>=1.28.0,<1.29.0)", "mypy-boto3-glue (>=1.28.0,<1.29.0)", "mypy-boto3-grafana (>=1.28.0,<1.29.0)", "mypy-boto3-greengrass (>=1.28.0,<1.29.0)", "mypy-boto3-greengrassv2 (>=1.28.0,<1.29.0)", "mypy-boto3-groundstation (>=1.28.0,<1.29.0)", "mypy-boto3-guardduty (>=1.28.0,<1.29.0)", "mypy-boto3-health (>=1.28.0,<1.29.0)", "mypy-boto3-healthlake (>=1.28.0,<1.29.0)", "mypy-boto3-honeycode (>=1.28.0,<1.29.0)", "mypy-boto3-iam (>=1.28.0,<1.29.0)", "mypy-boto3-identitystore (>=1.28.0,<1.29.0)", "mypy-boto3-imagebuilder (>=1.28.0,<1.29.0)", "mypy-boto3-importexport (>=1.28.0,<1.29.0)", "mypy-boto3-inspector (>=1.28.0,<1.29.0)", "mypy-boto3-inspector2 (>=1.28.0,<1.29.0)", "mypy-boto3-internetmonitor (>=1.28.0,<1.29.0)", "mypy-boto3-iot (>=1.28.0,<1.29.0)", "mypy-boto3-iot-data (>=1.28.0,<1.29.0)", "mypy-boto3-iot-jobs-data (>=1.28.0,<1.29.0)", "mypy-boto3-iot-roborunner (>=1.28.0,<1.29.0)", "mypy-boto3-iot1click-devices (>=1.28.0,<1.29.0)", "mypy-boto3-iot1click-projects (>=1.28.0,<1.29.0)", "mypy-boto3-iotanalytics (>=1.28.0,<1.29.0)", "mypy-boto3-iotdeviceadvisor (>=1.28.0,<1.29.0)", "mypy-boto3-iotevents (>=1.28.0,<1.29.0)", "mypy-boto3-iotevents-data (>=1.28.0,<1.29.0)", "mypy-boto3-iotfleethub (>=1.28.0,<1.29.0)", "mypy-boto3-iotfleetwise (>=1.28.0,<1.29.0)", "mypy-boto3-iotsecuretunneling (>=1.28.0,<1.29.0)", "mypy-boto3-iotsitewise (>=1.28.0,<1.29.0)", "mypy-boto3-iotthingsgraph (>=1.28.0,<1.29.0)", "mypy-boto3-iottwinmaker (>=1.28.0,<1.29.0)", "mypy-boto3-iotwireless (>=1.28.0,<1.29.0)", "mypy-boto3-ivs (>=1.28.0,<1.29.0)", "mypy-boto3-ivs-realtime (>=1.28.0,<1.29.0)", "mypy-boto3-ivschat (>=1.28.0,<1.29.0)", "mypy-boto3-kafka (>=1.28.0,<1.29.0)", "mypy-boto3-kafkaconnect (>=1.28.0,<1.29.0)", "mypy-boto3-kendra (>=1.28.0,<1.29.0)", "mypy-boto3-kendra-ranking (>=1.28.0,<1.29.0)", "mypy-boto3-keyspaces (>=1.28.0,<1.29.0)", "mypy-boto3-kinesis (>=1.28.0,<1.29.0)", "mypy-boto3-kinesis-video-archived-media (>=1.28.0,<1.29.0)", "mypy-boto3-kinesis-video-media (>=1.28.0,<1.29.0)", "mypy-boto3-kinesis-video-signaling (>=1.28.0,<1.29.0)", "mypy-boto3-kinesis-video-webrtc-storage (>=1.28.0,<1.29.0)", "mypy-boto3-kinesisanalytics (>=1.28.0,<1.29.0)", "mypy-boto3-kinesisanalyticsv2 (>=1.28.0,<1.29.0)", "mypy-boto3-kinesisvideo (>=1.28.0,<1.29.0)", "mypy-boto3-kms (>=1.28.0,<1.29.0)", "mypy-boto3-lakeformation (>=1.28.0,<1.29.0)", "mypy-boto3-lambda (>=1.28.0,<1.29.0)", "mypy-boto3-lex-models (>=1.28.0,<1.29.0)", "mypy-boto3-lex-runtime (>=1.28.0,<1.29.0)", "mypy-boto3-lexv2-models (>=1.28.0,<1.29.0)", "mypy-boto3-lexv2-runtime (>=1.28.0,<1.29.0)", "mypy-boto3-license-manager (>=1.28.0,<1.29.0)", "mypy-boto3-license-manager-linux-subscriptions (>=1.28.0,<1.29.0)", "mypy-boto3-license-manager-user-subscriptions (>=1.28.0,<1.29.0)", "mypy-boto3-lightsail (>=1.28.0,<1.29.0)", "mypy-boto3-location (>=1.28.0,<1.29.0)", "mypy-boto3-logs (>=1.28.0,<1.29.0)", "mypy-boto3-lookoutequipment (>=1.28.0,<1.29.0)", "mypy-boto3-lookoutmetrics (>=1.28.0,<1.29.0)", "mypy-boto3-lookoutvision (>=1.28.0,<1.29.0)", "mypy-boto3-m2 (>=1.28.0,<1.29.0)", "mypy-boto3-machinelearning (>=1.28.0,<1.29.0)", "mypy-boto3-macie (>=1.28.0,<1.29.0)", 
"mypy-boto3-macie2 (>=1.28.0,<1.29.0)", "mypy-boto3-managedblockchain (>=1.28.0,<1.29.0)", "mypy-boto3-managedblockchain-query (>=1.28.0,<1.29.0)", "mypy-boto3-marketplace-catalog (>=1.28.0,<1.29.0)", "mypy-boto3-marketplace-entitlement (>=1.28.0,<1.29.0)", "mypy-boto3-marketplacecommerceanalytics (>=1.28.0,<1.29.0)", "mypy-boto3-mediaconnect (>=1.28.0,<1.29.0)", "mypy-boto3-mediaconvert (>=1.28.0,<1.29.0)", "mypy-boto3-medialive (>=1.28.0,<1.29.0)", "mypy-boto3-mediapackage (>=1.28.0,<1.29.0)", "mypy-boto3-mediapackage-vod (>=1.28.0,<1.29.0)", "mypy-boto3-mediapackagev2 (>=1.28.0,<1.29.0)", "mypy-boto3-mediastore (>=1.28.0,<1.29.0)", "mypy-boto3-mediastore-data (>=1.28.0,<1.29.0)", "mypy-boto3-mediatailor (>=1.28.0,<1.29.0)", "mypy-boto3-medical-imaging (>=1.28.0,<1.29.0)", "mypy-boto3-memorydb (>=1.28.0,<1.29.0)", "mypy-boto3-meteringmarketplace (>=1.28.0,<1.29.0)", "mypy-boto3-mgh (>=1.28.0,<1.29.0)", "mypy-boto3-mgn (>=1.28.0,<1.29.0)", "mypy-boto3-migration-hub-refactor-spaces (>=1.28.0,<1.29.0)", "mypy-boto3-migrationhub-config (>=1.28.0,<1.29.0)", "mypy-boto3-migrationhuborchestrator (>=1.28.0,<1.29.0)", "mypy-boto3-migrationhubstrategy (>=1.28.0,<1.29.0)", "mypy-boto3-mobile (>=1.28.0,<1.29.0)", "mypy-boto3-mq (>=1.28.0,<1.29.0)", "mypy-boto3-mturk (>=1.28.0,<1.29.0)", "mypy-boto3-mwaa (>=1.28.0,<1.29.0)", "mypy-boto3-neptune (>=1.28.0,<1.29.0)", "mypy-boto3-neptunedata (>=1.28.0,<1.29.0)", "mypy-boto3-network-firewall (>=1.28.0,<1.29.0)", "mypy-boto3-networkmanager (>=1.28.0,<1.29.0)", "mypy-boto3-nimble (>=1.28.0,<1.29.0)", "mypy-boto3-oam (>=1.28.0,<1.29.0)", "mypy-boto3-omics (>=1.28.0,<1.29.0)", "mypy-boto3-opensearch (>=1.28.0,<1.29.0)", "mypy-boto3-opensearchserverless (>=1.28.0,<1.29.0)", "mypy-boto3-opsworks (>=1.28.0,<1.29.0)", "mypy-boto3-opsworkscm (>=1.28.0,<1.29.0)", "mypy-boto3-organizations (>=1.28.0,<1.29.0)", "mypy-boto3-osis (>=1.28.0,<1.29.0)", "mypy-boto3-outposts (>=1.28.0,<1.29.0)", "mypy-boto3-panorama (>=1.28.0,<1.29.0)", "mypy-boto3-payment-cryptography (>=1.28.0,<1.29.0)", "mypy-boto3-payment-cryptography-data (>=1.28.0,<1.29.0)", "mypy-boto3-pca-connector-ad (>=1.28.0,<1.29.0)", "mypy-boto3-personalize (>=1.28.0,<1.29.0)", "mypy-boto3-personalize-events (>=1.28.0,<1.29.0)", "mypy-boto3-personalize-runtime (>=1.28.0,<1.29.0)", "mypy-boto3-pi (>=1.28.0,<1.29.0)", "mypy-boto3-pinpoint (>=1.28.0,<1.29.0)", "mypy-boto3-pinpoint-email (>=1.28.0,<1.29.0)", "mypy-boto3-pinpoint-sms-voice (>=1.28.0,<1.29.0)", "mypy-boto3-pinpoint-sms-voice-v2 (>=1.28.0,<1.29.0)", "mypy-boto3-pipes (>=1.28.0,<1.29.0)", "mypy-boto3-polly (>=1.28.0,<1.29.0)", "mypy-boto3-pricing (>=1.28.0,<1.29.0)", "mypy-boto3-privatenetworks (>=1.28.0,<1.29.0)", "mypy-boto3-proton (>=1.28.0,<1.29.0)", "mypy-boto3-qldb (>=1.28.0,<1.29.0)", "mypy-boto3-qldb-session (>=1.28.0,<1.29.0)", "mypy-boto3-quicksight (>=1.28.0,<1.29.0)", "mypy-boto3-ram (>=1.28.0,<1.29.0)", "mypy-boto3-rbin (>=1.28.0,<1.29.0)", "mypy-boto3-rds (>=1.28.0,<1.29.0)", "mypy-boto3-rds-data (>=1.28.0,<1.29.0)", "mypy-boto3-redshift (>=1.28.0,<1.29.0)", "mypy-boto3-redshift-data (>=1.28.0,<1.29.0)", "mypy-boto3-redshift-serverless (>=1.28.0,<1.29.0)", "mypy-boto3-rekognition (>=1.28.0,<1.29.0)", "mypy-boto3-resiliencehub (>=1.28.0,<1.29.0)", "mypy-boto3-resource-explorer-2 (>=1.28.0,<1.29.0)", "mypy-boto3-resource-groups (>=1.28.0,<1.29.0)", "mypy-boto3-resourcegroupstaggingapi (>=1.28.0,<1.29.0)", "mypy-boto3-robomaker (>=1.28.0,<1.29.0)", "mypy-boto3-rolesanywhere (>=1.28.0,<1.29.0)", "mypy-boto3-route53 (>=1.28.0,<1.29.0)", 
"mypy-boto3-route53-recovery-cluster (>=1.28.0,<1.29.0)", "mypy-boto3-route53-recovery-control-config (>=1.28.0,<1.29.0)", "mypy-boto3-route53-recovery-readiness (>=1.28.0,<1.29.0)", "mypy-boto3-route53domains (>=1.28.0,<1.29.0)", "mypy-boto3-route53resolver (>=1.28.0,<1.29.0)", "mypy-boto3-rum (>=1.28.0,<1.29.0)", "mypy-boto3-s3 (>=1.28.0,<1.29.0)", "mypy-boto3-s3control (>=1.28.0,<1.29.0)", "mypy-boto3-s3outposts (>=1.28.0,<1.29.0)", "mypy-boto3-sagemaker (>=1.28.0,<1.29.0)", "mypy-boto3-sagemaker-a2i-runtime (>=1.28.0,<1.29.0)", "mypy-boto3-sagemaker-edge (>=1.28.0,<1.29.0)", "mypy-boto3-sagemaker-featurestore-runtime (>=1.28.0,<1.29.0)", "mypy-boto3-sagemaker-geospatial (>=1.28.0,<1.29.0)", "mypy-boto3-sagemaker-metrics (>=1.28.0,<1.29.0)", "mypy-boto3-sagemaker-runtime (>=1.28.0,<1.29.0)", "mypy-boto3-savingsplans (>=1.28.0,<1.29.0)", "mypy-boto3-scheduler (>=1.28.0,<1.29.0)", "mypy-boto3-schemas (>=1.28.0,<1.29.0)", "mypy-boto3-sdb (>=1.28.0,<1.29.0)", "mypy-boto3-secretsmanager (>=1.28.0,<1.29.0)", "mypy-boto3-securityhub (>=1.28.0,<1.29.0)", "mypy-boto3-securitylake (>=1.28.0,<1.29.0)", "mypy-boto3-serverlessrepo (>=1.28.0,<1.29.0)", "mypy-boto3-service-quotas (>=1.28.0,<1.29.0)", "mypy-boto3-servicecatalog (>=1.28.0,<1.29.0)", "mypy-boto3-servicecatalog-appregistry (>=1.28.0,<1.29.0)", "mypy-boto3-servicediscovery (>=1.28.0,<1.29.0)", "mypy-boto3-ses (>=1.28.0,<1.29.0)", "mypy-boto3-sesv2 (>=1.28.0,<1.29.0)", "mypy-boto3-shield (>=1.28.0,<1.29.0)", "mypy-boto3-signer (>=1.28.0,<1.29.0)", "mypy-boto3-simspaceweaver (>=1.28.0,<1.29.0)", "mypy-boto3-sms (>=1.28.0,<1.29.0)", "mypy-boto3-sms-voice (>=1.28.0,<1.29.0)", "mypy-boto3-snow-device-management (>=1.28.0,<1.29.0)", "mypy-boto3-snowball (>=1.28.0,<1.29.0)", "mypy-boto3-sns (>=1.28.0,<1.29.0)", "mypy-boto3-sqs (>=1.28.0,<1.29.0)", "mypy-boto3-ssm (>=1.28.0,<1.29.0)", "mypy-boto3-ssm-contacts (>=1.28.0,<1.29.0)", "mypy-boto3-ssm-incidents (>=1.28.0,<1.29.0)", "mypy-boto3-ssm-sap (>=1.28.0,<1.29.0)", "mypy-boto3-sso (>=1.28.0,<1.29.0)", "mypy-boto3-sso-admin (>=1.28.0,<1.29.0)", "mypy-boto3-sso-oidc (>=1.28.0,<1.29.0)", "mypy-boto3-stepfunctions (>=1.28.0,<1.29.0)", "mypy-boto3-storagegateway (>=1.28.0,<1.29.0)", "mypy-boto3-sts (>=1.28.0,<1.29.0)", "mypy-boto3-support (>=1.28.0,<1.29.0)", "mypy-boto3-support-app (>=1.28.0,<1.29.0)", "mypy-boto3-swf (>=1.28.0,<1.29.0)", "mypy-boto3-synthetics (>=1.28.0,<1.29.0)", "mypy-boto3-textract (>=1.28.0,<1.29.0)", "mypy-boto3-timestream-query (>=1.28.0,<1.29.0)", "mypy-boto3-timestream-write (>=1.28.0,<1.29.0)", "mypy-boto3-tnb (>=1.28.0,<1.29.0)", "mypy-boto3-transcribe (>=1.28.0,<1.29.0)", "mypy-boto3-transfer (>=1.28.0,<1.29.0)", "mypy-boto3-translate (>=1.28.0,<1.29.0)", "mypy-boto3-verifiedpermissions (>=1.28.0,<1.29.0)", "mypy-boto3-voice-id (>=1.28.0,<1.29.0)", "mypy-boto3-vpc-lattice (>=1.28.0,<1.29.0)", "mypy-boto3-waf (>=1.28.0,<1.29.0)", "mypy-boto3-waf-regional (>=1.28.0,<1.29.0)", "mypy-boto3-wafv2 (>=1.28.0,<1.29.0)", "mypy-boto3-wellarchitected (>=1.28.0,<1.29.0)", "mypy-boto3-wisdom (>=1.28.0,<1.29.0)", "mypy-boto3-workdocs (>=1.28.0,<1.29.0)", "mypy-boto3-worklink (>=1.28.0,<1.29.0)", "mypy-boto3-workmail (>=1.28.0,<1.29.0)", "mypy-boto3-workmailmessageflow (>=1.28.0,<1.29.0)", "mypy-boto3-workspaces (>=1.28.0,<1.29.0)", "mypy-boto3-workspaces-web (>=1.28.0,<1.29.0)", "mypy-boto3-xray (>=1.28.0,<1.29.0)"] +amp = ["mypy-boto3-amp (>=1.28.0,<1.29.0)"] +amplify = ["mypy-boto3-amplify (>=1.28.0,<1.29.0)"] +amplifybackend = ["mypy-boto3-amplifybackend (>=1.28.0,<1.29.0)"] 
+amplifyuibuilder = ["mypy-boto3-amplifyuibuilder (>=1.28.0,<1.29.0)"] +apigateway = ["mypy-boto3-apigateway (>=1.28.0,<1.29.0)"] +apigatewaymanagementapi = ["mypy-boto3-apigatewaymanagementapi (>=1.28.0,<1.29.0)"] +apigatewayv2 = ["mypy-boto3-apigatewayv2 (>=1.28.0,<1.29.0)"] +appconfig = ["mypy-boto3-appconfig (>=1.28.0,<1.29.0)"] +appconfigdata = ["mypy-boto3-appconfigdata (>=1.28.0,<1.29.0)"] +appfabric = ["mypy-boto3-appfabric (>=1.28.0,<1.29.0)"] +appflow = ["mypy-boto3-appflow (>=1.28.0,<1.29.0)"] +appintegrations = ["mypy-boto3-appintegrations (>=1.28.0,<1.29.0)"] +application-autoscaling = ["mypy-boto3-application-autoscaling (>=1.28.0,<1.29.0)"] +application-insights = ["mypy-boto3-application-insights (>=1.28.0,<1.29.0)"] +applicationcostprofiler = ["mypy-boto3-applicationcostprofiler (>=1.28.0,<1.29.0)"] +appmesh = ["mypy-boto3-appmesh (>=1.28.0,<1.29.0)"] +apprunner = ["mypy-boto3-apprunner (>=1.28.0,<1.29.0)"] +appstream = ["mypy-boto3-appstream (>=1.28.0,<1.29.0)"] +appsync = ["mypy-boto3-appsync (>=1.28.0,<1.29.0)"] +arc-zonal-shift = ["mypy-boto3-arc-zonal-shift (>=1.28.0,<1.29.0)"] +athena = ["mypy-boto3-athena (>=1.28.0,<1.29.0)"] +auditmanager = ["mypy-boto3-auditmanager (>=1.28.0,<1.29.0)"] +autoscaling = ["mypy-boto3-autoscaling (>=1.28.0,<1.29.0)"] +autoscaling-plans = ["mypy-boto3-autoscaling-plans (>=1.28.0,<1.29.0)"] +backup = ["mypy-boto3-backup (>=1.28.0,<1.29.0)"] +backup-gateway = ["mypy-boto3-backup-gateway (>=1.28.0,<1.29.0)"] +backupstorage = ["mypy-boto3-backupstorage (>=1.28.0,<1.29.0)"] +batch = ["mypy-boto3-batch (>=1.28.0,<1.29.0)"] +billingconductor = ["mypy-boto3-billingconductor (>=1.28.0,<1.29.0)"] +boto3 = ["boto3 (==1.28.41)", "botocore (==1.31.41)"] +braket = ["mypy-boto3-braket (>=1.28.0,<1.29.0)"] +budgets = ["mypy-boto3-budgets (>=1.28.0,<1.29.0)"] +ce = ["mypy-boto3-ce (>=1.28.0,<1.29.0)"] +chime = ["mypy-boto3-chime (>=1.28.0,<1.29.0)"] +chime-sdk-identity = ["mypy-boto3-chime-sdk-identity (>=1.28.0,<1.29.0)"] +chime-sdk-media-pipelines = ["mypy-boto3-chime-sdk-media-pipelines (>=1.28.0,<1.29.0)"] +chime-sdk-meetings = ["mypy-boto3-chime-sdk-meetings (>=1.28.0,<1.29.0)"] +chime-sdk-messaging = ["mypy-boto3-chime-sdk-messaging (>=1.28.0,<1.29.0)"] +chime-sdk-voice = ["mypy-boto3-chime-sdk-voice (>=1.28.0,<1.29.0)"] +cleanrooms = ["mypy-boto3-cleanrooms (>=1.28.0,<1.29.0)"] +cloud9 = ["mypy-boto3-cloud9 (>=1.28.0,<1.29.0)"] +cloudcontrol = ["mypy-boto3-cloudcontrol (>=1.28.0,<1.29.0)"] +clouddirectory = ["mypy-boto3-clouddirectory (>=1.28.0,<1.29.0)"] +cloudformation = ["mypy-boto3-cloudformation (>=1.28.0,<1.29.0)"] +cloudfront = ["mypy-boto3-cloudfront (>=1.28.0,<1.29.0)"] +cloudhsm = ["mypy-boto3-cloudhsm (>=1.28.0,<1.29.0)"] +cloudhsmv2 = ["mypy-boto3-cloudhsmv2 (>=1.28.0,<1.29.0)"] +cloudsearch = ["mypy-boto3-cloudsearch (>=1.28.0,<1.29.0)"] +cloudsearchdomain = ["mypy-boto3-cloudsearchdomain (>=1.28.0,<1.29.0)"] +cloudtrail = ["mypy-boto3-cloudtrail (>=1.28.0,<1.29.0)"] +cloudtrail-data = ["mypy-boto3-cloudtrail-data (>=1.28.0,<1.29.0)"] +cloudwatch = ["mypy-boto3-cloudwatch (>=1.28.0,<1.29.0)"] +codeartifact = ["mypy-boto3-codeartifact (>=1.28.0,<1.29.0)"] +codebuild = ["mypy-boto3-codebuild (>=1.28.0,<1.29.0)"] +codecatalyst = ["mypy-boto3-codecatalyst (>=1.28.0,<1.29.0)"] +codecommit = ["mypy-boto3-codecommit (>=1.28.0,<1.29.0)"] +codedeploy = ["mypy-boto3-codedeploy (>=1.28.0,<1.29.0)"] +codeguru-reviewer = ["mypy-boto3-codeguru-reviewer (>=1.28.0,<1.29.0)"] +codeguru-security = ["mypy-boto3-codeguru-security (>=1.28.0,<1.29.0)"] 
+codeguruprofiler = ["mypy-boto3-codeguruprofiler (>=1.28.0,<1.29.0)"] +codepipeline = ["mypy-boto3-codepipeline (>=1.28.0,<1.29.0)"] +codestar = ["mypy-boto3-codestar (>=1.28.0,<1.29.0)"] +codestar-connections = ["mypy-boto3-codestar-connections (>=1.28.0,<1.29.0)"] +codestar-notifications = ["mypy-boto3-codestar-notifications (>=1.28.0,<1.29.0)"] +cognito-identity = ["mypy-boto3-cognito-identity (>=1.28.0,<1.29.0)"] +cognito-idp = ["mypy-boto3-cognito-idp (>=1.28.0,<1.29.0)"] +cognito-sync = ["mypy-boto3-cognito-sync (>=1.28.0,<1.29.0)"] +comprehend = ["mypy-boto3-comprehend (>=1.28.0,<1.29.0)"] +comprehendmedical = ["mypy-boto3-comprehendmedical (>=1.28.0,<1.29.0)"] +compute-optimizer = ["mypy-boto3-compute-optimizer (>=1.28.0,<1.29.0)"] +config = ["mypy-boto3-config (>=1.28.0,<1.29.0)"] +connect = ["mypy-boto3-connect (>=1.28.0,<1.29.0)"] +connect-contact-lens = ["mypy-boto3-connect-contact-lens (>=1.28.0,<1.29.0)"] +connectcampaigns = ["mypy-boto3-connectcampaigns (>=1.28.0,<1.29.0)"] +connectcases = ["mypy-boto3-connectcases (>=1.28.0,<1.29.0)"] +connectparticipant = ["mypy-boto3-connectparticipant (>=1.28.0,<1.29.0)"] +controltower = ["mypy-boto3-controltower (>=1.28.0,<1.29.0)"] +cur = ["mypy-boto3-cur (>=1.28.0,<1.29.0)"] +customer-profiles = ["mypy-boto3-customer-profiles (>=1.28.0,<1.29.0)"] +databrew = ["mypy-boto3-databrew (>=1.28.0,<1.29.0)"] +dataexchange = ["mypy-boto3-dataexchange (>=1.28.0,<1.29.0)"] +datapipeline = ["mypy-boto3-datapipeline (>=1.28.0,<1.29.0)"] +datasync = ["mypy-boto3-datasync (>=1.28.0,<1.29.0)"] +dax = ["mypy-boto3-dax (>=1.28.0,<1.29.0)"] +detective = ["mypy-boto3-detective (>=1.28.0,<1.29.0)"] +devicefarm = ["mypy-boto3-devicefarm (>=1.28.0,<1.29.0)"] +devops-guru = ["mypy-boto3-devops-guru (>=1.28.0,<1.29.0)"] +directconnect = ["mypy-boto3-directconnect (>=1.28.0,<1.29.0)"] +discovery = ["mypy-boto3-discovery (>=1.28.0,<1.29.0)"] +dlm = ["mypy-boto3-dlm (>=1.28.0,<1.29.0)"] +dms = ["mypy-boto3-dms (>=1.28.0,<1.29.0)"] +docdb = ["mypy-boto3-docdb (>=1.28.0,<1.29.0)"] +docdb-elastic = ["mypy-boto3-docdb-elastic (>=1.28.0,<1.29.0)"] +drs = ["mypy-boto3-drs (>=1.28.0,<1.29.0)"] +ds = ["mypy-boto3-ds (>=1.28.0,<1.29.0)"] +dynamodb = ["mypy-boto3-dynamodb (>=1.28.0,<1.29.0)"] +dynamodbstreams = ["mypy-boto3-dynamodbstreams (>=1.28.0,<1.29.0)"] +ebs = ["mypy-boto3-ebs (>=1.28.0,<1.29.0)"] +ec2 = ["mypy-boto3-ec2 (>=1.28.0,<1.29.0)"] +ec2-instance-connect = ["mypy-boto3-ec2-instance-connect (>=1.28.0,<1.29.0)"] +ecr = ["mypy-boto3-ecr (>=1.28.0,<1.29.0)"] +ecr-public = ["mypy-boto3-ecr-public (>=1.28.0,<1.29.0)"] +ecs = ["mypy-boto3-ecs (>=1.28.0,<1.29.0)"] +efs = ["mypy-boto3-efs (>=1.28.0,<1.29.0)"] +eks = ["mypy-boto3-eks (>=1.28.0,<1.29.0)"] +elastic-inference = ["mypy-boto3-elastic-inference (>=1.28.0,<1.29.0)"] +elasticache = ["mypy-boto3-elasticache (>=1.28.0,<1.29.0)"] +elasticbeanstalk = ["mypy-boto3-elasticbeanstalk (>=1.28.0,<1.29.0)"] +elastictranscoder = ["mypy-boto3-elastictranscoder (>=1.28.0,<1.29.0)"] +elb = ["mypy-boto3-elb (>=1.28.0,<1.29.0)"] +elbv2 = ["mypy-boto3-elbv2 (>=1.28.0,<1.29.0)"] +emr = ["mypy-boto3-emr (>=1.28.0,<1.29.0)"] +emr-containers = ["mypy-boto3-emr-containers (>=1.28.0,<1.29.0)"] +emr-serverless = ["mypy-boto3-emr-serverless (>=1.28.0,<1.29.0)"] +entityresolution = ["mypy-boto3-entityresolution (>=1.28.0,<1.29.0)"] +es = ["mypy-boto3-es (>=1.28.0,<1.29.0)"] +essential = ["mypy-boto3-cloudformation (>=1.28.0,<1.29.0)", "mypy-boto3-dynamodb (>=1.28.0,<1.29.0)", "mypy-boto3-ec2 (>=1.28.0,<1.29.0)", "mypy-boto3-lambda 
(>=1.28.0,<1.29.0)", "mypy-boto3-rds (>=1.28.0,<1.29.0)", "mypy-boto3-s3 (>=1.28.0,<1.29.0)", "mypy-boto3-sqs (>=1.28.0,<1.29.0)"] +events = ["mypy-boto3-events (>=1.28.0,<1.29.0)"] +evidently = ["mypy-boto3-evidently (>=1.28.0,<1.29.0)"] +finspace = ["mypy-boto3-finspace (>=1.28.0,<1.29.0)"] +finspace-data = ["mypy-boto3-finspace-data (>=1.28.0,<1.29.0)"] +firehose = ["mypy-boto3-firehose (>=1.28.0,<1.29.0)"] +fis = ["mypy-boto3-fis (>=1.28.0,<1.29.0)"] +fms = ["mypy-boto3-fms (>=1.28.0,<1.29.0)"] +forecast = ["mypy-boto3-forecast (>=1.28.0,<1.29.0)"] +forecastquery = ["mypy-boto3-forecastquery (>=1.28.0,<1.29.0)"] +frauddetector = ["mypy-boto3-frauddetector (>=1.28.0,<1.29.0)"] +fsx = ["mypy-boto3-fsx (>=1.28.0,<1.29.0)"] +gamelift = ["mypy-boto3-gamelift (>=1.28.0,<1.29.0)"] +gamesparks = ["mypy-boto3-gamesparks (>=1.28.0,<1.29.0)"] +glacier = ["mypy-boto3-glacier (>=1.28.0,<1.29.0)"] +globalaccelerator = ["mypy-boto3-globalaccelerator (>=1.28.0,<1.29.0)"] +glue = ["mypy-boto3-glue (>=1.28.0,<1.29.0)"] +grafana = ["mypy-boto3-grafana (>=1.28.0,<1.29.0)"] +greengrass = ["mypy-boto3-greengrass (>=1.28.0,<1.29.0)"] +greengrassv2 = ["mypy-boto3-greengrassv2 (>=1.28.0,<1.29.0)"] +groundstation = ["mypy-boto3-groundstation (>=1.28.0,<1.29.0)"] +guardduty = ["mypy-boto3-guardduty (>=1.28.0,<1.29.0)"] +health = ["mypy-boto3-health (>=1.28.0,<1.29.0)"] +healthlake = ["mypy-boto3-healthlake (>=1.28.0,<1.29.0)"] +honeycode = ["mypy-boto3-honeycode (>=1.28.0,<1.29.0)"] +iam = ["mypy-boto3-iam (>=1.28.0,<1.29.0)"] +identitystore = ["mypy-boto3-identitystore (>=1.28.0,<1.29.0)"] +imagebuilder = ["mypy-boto3-imagebuilder (>=1.28.0,<1.29.0)"] +importexport = ["mypy-boto3-importexport (>=1.28.0,<1.29.0)"] +inspector = ["mypy-boto3-inspector (>=1.28.0,<1.29.0)"] +inspector2 = ["mypy-boto3-inspector2 (>=1.28.0,<1.29.0)"] +internetmonitor = ["mypy-boto3-internetmonitor (>=1.28.0,<1.29.0)"] +iot = ["mypy-boto3-iot (>=1.28.0,<1.29.0)"] +iot-data = ["mypy-boto3-iot-data (>=1.28.0,<1.29.0)"] +iot-jobs-data = ["mypy-boto3-iot-jobs-data (>=1.28.0,<1.29.0)"] +iot-roborunner = ["mypy-boto3-iot-roborunner (>=1.28.0,<1.29.0)"] +iot1click-devices = ["mypy-boto3-iot1click-devices (>=1.28.0,<1.29.0)"] +iot1click-projects = ["mypy-boto3-iot1click-projects (>=1.28.0,<1.29.0)"] +iotanalytics = ["mypy-boto3-iotanalytics (>=1.28.0,<1.29.0)"] +iotdeviceadvisor = ["mypy-boto3-iotdeviceadvisor (>=1.28.0,<1.29.0)"] +iotevents = ["mypy-boto3-iotevents (>=1.28.0,<1.29.0)"] +iotevents-data = ["mypy-boto3-iotevents-data (>=1.28.0,<1.29.0)"] +iotfleethub = ["mypy-boto3-iotfleethub (>=1.28.0,<1.29.0)"] +iotfleetwise = ["mypy-boto3-iotfleetwise (>=1.28.0,<1.29.0)"] +iotsecuretunneling = ["mypy-boto3-iotsecuretunneling (>=1.28.0,<1.29.0)"] +iotsitewise = ["mypy-boto3-iotsitewise (>=1.28.0,<1.29.0)"] +iotthingsgraph = ["mypy-boto3-iotthingsgraph (>=1.28.0,<1.29.0)"] +iottwinmaker = ["mypy-boto3-iottwinmaker (>=1.28.0,<1.29.0)"] +iotwireless = ["mypy-boto3-iotwireless (>=1.28.0,<1.29.0)"] +ivs = ["mypy-boto3-ivs (>=1.28.0,<1.29.0)"] +ivs-realtime = ["mypy-boto3-ivs-realtime (>=1.28.0,<1.29.0)"] +ivschat = ["mypy-boto3-ivschat (>=1.28.0,<1.29.0)"] +kafka = ["mypy-boto3-kafka (>=1.28.0,<1.29.0)"] +kafkaconnect = ["mypy-boto3-kafkaconnect (>=1.28.0,<1.29.0)"] +kendra = ["mypy-boto3-kendra (>=1.28.0,<1.29.0)"] +kendra-ranking = ["mypy-boto3-kendra-ranking (>=1.28.0,<1.29.0)"] +keyspaces = ["mypy-boto3-keyspaces (>=1.28.0,<1.29.0)"] +kinesis = ["mypy-boto3-kinesis (>=1.28.0,<1.29.0)"] +kinesis-video-archived-media = 
["mypy-boto3-kinesis-video-archived-media (>=1.28.0,<1.29.0)"] +kinesis-video-media = ["mypy-boto3-kinesis-video-media (>=1.28.0,<1.29.0)"] +kinesis-video-signaling = ["mypy-boto3-kinesis-video-signaling (>=1.28.0,<1.29.0)"] +kinesis-video-webrtc-storage = ["mypy-boto3-kinesis-video-webrtc-storage (>=1.28.0,<1.29.0)"] +kinesisanalytics = ["mypy-boto3-kinesisanalytics (>=1.28.0,<1.29.0)"] +kinesisanalyticsv2 = ["mypy-boto3-kinesisanalyticsv2 (>=1.28.0,<1.29.0)"] +kinesisvideo = ["mypy-boto3-kinesisvideo (>=1.28.0,<1.29.0)"] +kms = ["mypy-boto3-kms (>=1.28.0,<1.29.0)"] +lakeformation = ["mypy-boto3-lakeformation (>=1.28.0,<1.29.0)"] +lambda = ["mypy-boto3-lambda (>=1.28.0,<1.29.0)"] +lex-models = ["mypy-boto3-lex-models (>=1.28.0,<1.29.0)"] +lex-runtime = ["mypy-boto3-lex-runtime (>=1.28.0,<1.29.0)"] +lexv2-models = ["mypy-boto3-lexv2-models (>=1.28.0,<1.29.0)"] +lexv2-runtime = ["mypy-boto3-lexv2-runtime (>=1.28.0,<1.29.0)"] +license-manager = ["mypy-boto3-license-manager (>=1.28.0,<1.29.0)"] +license-manager-linux-subscriptions = ["mypy-boto3-license-manager-linux-subscriptions (>=1.28.0,<1.29.0)"] +license-manager-user-subscriptions = ["mypy-boto3-license-manager-user-subscriptions (>=1.28.0,<1.29.0)"] +lightsail = ["mypy-boto3-lightsail (>=1.28.0,<1.29.0)"] +location = ["mypy-boto3-location (>=1.28.0,<1.29.0)"] +logs = ["mypy-boto3-logs (>=1.28.0,<1.29.0)"] +lookoutequipment = ["mypy-boto3-lookoutequipment (>=1.28.0,<1.29.0)"] +lookoutmetrics = ["mypy-boto3-lookoutmetrics (>=1.28.0,<1.29.0)"] +lookoutvision = ["mypy-boto3-lookoutvision (>=1.28.0,<1.29.0)"] +m2 = ["mypy-boto3-m2 (>=1.28.0,<1.29.0)"] +machinelearning = ["mypy-boto3-machinelearning (>=1.28.0,<1.29.0)"] +macie = ["mypy-boto3-macie (>=1.28.0,<1.29.0)"] +macie2 = ["mypy-boto3-macie2 (>=1.28.0,<1.29.0)"] +managedblockchain = ["mypy-boto3-managedblockchain (>=1.28.0,<1.29.0)"] +managedblockchain-query = ["mypy-boto3-managedblockchain-query (>=1.28.0,<1.29.0)"] +marketplace-catalog = ["mypy-boto3-marketplace-catalog (>=1.28.0,<1.29.0)"] +marketplace-entitlement = ["mypy-boto3-marketplace-entitlement (>=1.28.0,<1.29.0)"] +marketplacecommerceanalytics = ["mypy-boto3-marketplacecommerceanalytics (>=1.28.0,<1.29.0)"] +mediaconnect = ["mypy-boto3-mediaconnect (>=1.28.0,<1.29.0)"] +mediaconvert = ["mypy-boto3-mediaconvert (>=1.28.0,<1.29.0)"] +medialive = ["mypy-boto3-medialive (>=1.28.0,<1.29.0)"] +mediapackage = ["mypy-boto3-mediapackage (>=1.28.0,<1.29.0)"] +mediapackage-vod = ["mypy-boto3-mediapackage-vod (>=1.28.0,<1.29.0)"] +mediapackagev2 = ["mypy-boto3-mediapackagev2 (>=1.28.0,<1.29.0)"] +mediastore = ["mypy-boto3-mediastore (>=1.28.0,<1.29.0)"] +mediastore-data = ["mypy-boto3-mediastore-data (>=1.28.0,<1.29.0)"] +mediatailor = ["mypy-boto3-mediatailor (>=1.28.0,<1.29.0)"] +medical-imaging = ["mypy-boto3-medical-imaging (>=1.28.0,<1.29.0)"] +memorydb = ["mypy-boto3-memorydb (>=1.28.0,<1.29.0)"] +meteringmarketplace = ["mypy-boto3-meteringmarketplace (>=1.28.0,<1.29.0)"] +mgh = ["mypy-boto3-mgh (>=1.28.0,<1.29.0)"] +mgn = ["mypy-boto3-mgn (>=1.28.0,<1.29.0)"] +migration-hub-refactor-spaces = ["mypy-boto3-migration-hub-refactor-spaces (>=1.28.0,<1.29.0)"] +migrationhub-config = ["mypy-boto3-migrationhub-config (>=1.28.0,<1.29.0)"] +migrationhuborchestrator = ["mypy-boto3-migrationhuborchestrator (>=1.28.0,<1.29.0)"] +migrationhubstrategy = ["mypy-boto3-migrationhubstrategy (>=1.28.0,<1.29.0)"] +mobile = ["mypy-boto3-mobile (>=1.28.0,<1.29.0)"] +mq = ["mypy-boto3-mq (>=1.28.0,<1.29.0)"] +mturk = ["mypy-boto3-mturk 
(>=1.28.0,<1.29.0)"] +mwaa = ["mypy-boto3-mwaa (>=1.28.0,<1.29.0)"] +neptune = ["mypy-boto3-neptune (>=1.28.0,<1.29.0)"] +neptunedata = ["mypy-boto3-neptunedata (>=1.28.0,<1.29.0)"] +network-firewall = ["mypy-boto3-network-firewall (>=1.28.0,<1.29.0)"] +networkmanager = ["mypy-boto3-networkmanager (>=1.28.0,<1.29.0)"] +nimble = ["mypy-boto3-nimble (>=1.28.0,<1.29.0)"] +oam = ["mypy-boto3-oam (>=1.28.0,<1.29.0)"] +omics = ["mypy-boto3-omics (>=1.28.0,<1.29.0)"] +opensearch = ["mypy-boto3-opensearch (>=1.28.0,<1.29.0)"] +opensearchserverless = ["mypy-boto3-opensearchserverless (>=1.28.0,<1.29.0)"] +opsworks = ["mypy-boto3-opsworks (>=1.28.0,<1.29.0)"] +opsworkscm = ["mypy-boto3-opsworkscm (>=1.28.0,<1.29.0)"] +organizations = ["mypy-boto3-organizations (>=1.28.0,<1.29.0)"] +osis = ["mypy-boto3-osis (>=1.28.0,<1.29.0)"] +outposts = ["mypy-boto3-outposts (>=1.28.0,<1.29.0)"] +panorama = ["mypy-boto3-panorama (>=1.28.0,<1.29.0)"] +payment-cryptography = ["mypy-boto3-payment-cryptography (>=1.28.0,<1.29.0)"] +payment-cryptography-data = ["mypy-boto3-payment-cryptography-data (>=1.28.0,<1.29.0)"] +pca-connector-ad = ["mypy-boto3-pca-connector-ad (>=1.28.0,<1.29.0)"] +personalize = ["mypy-boto3-personalize (>=1.28.0,<1.29.0)"] +personalize-events = ["mypy-boto3-personalize-events (>=1.28.0,<1.29.0)"] +personalize-runtime = ["mypy-boto3-personalize-runtime (>=1.28.0,<1.29.0)"] +pi = ["mypy-boto3-pi (>=1.28.0,<1.29.0)"] +pinpoint = ["mypy-boto3-pinpoint (>=1.28.0,<1.29.0)"] +pinpoint-email = ["mypy-boto3-pinpoint-email (>=1.28.0,<1.29.0)"] +pinpoint-sms-voice = ["mypy-boto3-pinpoint-sms-voice (>=1.28.0,<1.29.0)"] +pinpoint-sms-voice-v2 = ["mypy-boto3-pinpoint-sms-voice-v2 (>=1.28.0,<1.29.0)"] +pipes = ["mypy-boto3-pipes (>=1.28.0,<1.29.0)"] +polly = ["mypy-boto3-polly (>=1.28.0,<1.29.0)"] +pricing = ["mypy-boto3-pricing (>=1.28.0,<1.29.0)"] +privatenetworks = ["mypy-boto3-privatenetworks (>=1.28.0,<1.29.0)"] +proton = ["mypy-boto3-proton (>=1.28.0,<1.29.0)"] +qldb = ["mypy-boto3-qldb (>=1.28.0,<1.29.0)"] +qldb-session = ["mypy-boto3-qldb-session (>=1.28.0,<1.29.0)"] +quicksight = ["mypy-boto3-quicksight (>=1.28.0,<1.29.0)"] +ram = ["mypy-boto3-ram (>=1.28.0,<1.29.0)"] +rbin = ["mypy-boto3-rbin (>=1.28.0,<1.29.0)"] +rds = ["mypy-boto3-rds (>=1.28.0,<1.29.0)"] +rds-data = ["mypy-boto3-rds-data (>=1.28.0,<1.29.0)"] +redshift = ["mypy-boto3-redshift (>=1.28.0,<1.29.0)"] +redshift-data = ["mypy-boto3-redshift-data (>=1.28.0,<1.29.0)"] +redshift-serverless = ["mypy-boto3-redshift-serverless (>=1.28.0,<1.29.0)"] +rekognition = ["mypy-boto3-rekognition (>=1.28.0,<1.29.0)"] +resiliencehub = ["mypy-boto3-resiliencehub (>=1.28.0,<1.29.0)"] +resource-explorer-2 = ["mypy-boto3-resource-explorer-2 (>=1.28.0,<1.29.0)"] +resource-groups = ["mypy-boto3-resource-groups (>=1.28.0,<1.29.0)"] +resourcegroupstaggingapi = ["mypy-boto3-resourcegroupstaggingapi (>=1.28.0,<1.29.0)"] +robomaker = ["mypy-boto3-robomaker (>=1.28.0,<1.29.0)"] +rolesanywhere = ["mypy-boto3-rolesanywhere (>=1.28.0,<1.29.0)"] +route53 = ["mypy-boto3-route53 (>=1.28.0,<1.29.0)"] +route53-recovery-cluster = ["mypy-boto3-route53-recovery-cluster (>=1.28.0,<1.29.0)"] +route53-recovery-control-config = ["mypy-boto3-route53-recovery-control-config (>=1.28.0,<1.29.0)"] +route53-recovery-readiness = ["mypy-boto3-route53-recovery-readiness (>=1.28.0,<1.29.0)"] +route53domains = ["mypy-boto3-route53domains (>=1.28.0,<1.29.0)"] +route53resolver = ["mypy-boto3-route53resolver (>=1.28.0,<1.29.0)"] +rum = ["mypy-boto3-rum (>=1.28.0,<1.29.0)"] +s3 = 
["mypy-boto3-s3 (>=1.28.0,<1.29.0)"] +s3control = ["mypy-boto3-s3control (>=1.28.0,<1.29.0)"] +s3outposts = ["mypy-boto3-s3outposts (>=1.28.0,<1.29.0)"] +sagemaker = ["mypy-boto3-sagemaker (>=1.28.0,<1.29.0)"] +sagemaker-a2i-runtime = ["mypy-boto3-sagemaker-a2i-runtime (>=1.28.0,<1.29.0)"] +sagemaker-edge = ["mypy-boto3-sagemaker-edge (>=1.28.0,<1.29.0)"] +sagemaker-featurestore-runtime = ["mypy-boto3-sagemaker-featurestore-runtime (>=1.28.0,<1.29.0)"] +sagemaker-geospatial = ["mypy-boto3-sagemaker-geospatial (>=1.28.0,<1.29.0)"] +sagemaker-metrics = ["mypy-boto3-sagemaker-metrics (>=1.28.0,<1.29.0)"] +sagemaker-runtime = ["mypy-boto3-sagemaker-runtime (>=1.28.0,<1.29.0)"] +savingsplans = ["mypy-boto3-savingsplans (>=1.28.0,<1.29.0)"] +scheduler = ["mypy-boto3-scheduler (>=1.28.0,<1.29.0)"] +schemas = ["mypy-boto3-schemas (>=1.28.0,<1.29.0)"] +sdb = ["mypy-boto3-sdb (>=1.28.0,<1.29.0)"] +secretsmanager = ["mypy-boto3-secretsmanager (>=1.28.0,<1.29.0)"] +securityhub = ["mypy-boto3-securityhub (>=1.28.0,<1.29.0)"] +securitylake = ["mypy-boto3-securitylake (>=1.28.0,<1.29.0)"] +serverlessrepo = ["mypy-boto3-serverlessrepo (>=1.28.0,<1.29.0)"] +service-quotas = ["mypy-boto3-service-quotas (>=1.28.0,<1.29.0)"] +servicecatalog = ["mypy-boto3-servicecatalog (>=1.28.0,<1.29.0)"] +servicecatalog-appregistry = ["mypy-boto3-servicecatalog-appregistry (>=1.28.0,<1.29.0)"] +servicediscovery = ["mypy-boto3-servicediscovery (>=1.28.0,<1.29.0)"] +ses = ["mypy-boto3-ses (>=1.28.0,<1.29.0)"] +sesv2 = ["mypy-boto3-sesv2 (>=1.28.0,<1.29.0)"] +shield = ["mypy-boto3-shield (>=1.28.0,<1.29.0)"] +signer = ["mypy-boto3-signer (>=1.28.0,<1.29.0)"] +simspaceweaver = ["mypy-boto3-simspaceweaver (>=1.28.0,<1.29.0)"] +sms = ["mypy-boto3-sms (>=1.28.0,<1.29.0)"] +sms-voice = ["mypy-boto3-sms-voice (>=1.28.0,<1.29.0)"] +snow-device-management = ["mypy-boto3-snow-device-management (>=1.28.0,<1.29.0)"] +snowball = ["mypy-boto3-snowball (>=1.28.0,<1.29.0)"] +sns = ["mypy-boto3-sns (>=1.28.0,<1.29.0)"] +sqs = ["mypy-boto3-sqs (>=1.28.0,<1.29.0)"] +ssm = ["mypy-boto3-ssm (>=1.28.0,<1.29.0)"] +ssm-contacts = ["mypy-boto3-ssm-contacts (>=1.28.0,<1.29.0)"] +ssm-incidents = ["mypy-boto3-ssm-incidents (>=1.28.0,<1.29.0)"] +ssm-sap = ["mypy-boto3-ssm-sap (>=1.28.0,<1.29.0)"] +sso = ["mypy-boto3-sso (>=1.28.0,<1.29.0)"] +sso-admin = ["mypy-boto3-sso-admin (>=1.28.0,<1.29.0)"] +sso-oidc = ["mypy-boto3-sso-oidc (>=1.28.0,<1.29.0)"] +stepfunctions = ["mypy-boto3-stepfunctions (>=1.28.0,<1.29.0)"] +storagegateway = ["mypy-boto3-storagegateway (>=1.28.0,<1.29.0)"] +sts = ["mypy-boto3-sts (>=1.28.0,<1.29.0)"] +support = ["mypy-boto3-support (>=1.28.0,<1.29.0)"] +support-app = ["mypy-boto3-support-app (>=1.28.0,<1.29.0)"] +swf = ["mypy-boto3-swf (>=1.28.0,<1.29.0)"] +synthetics = ["mypy-boto3-synthetics (>=1.28.0,<1.29.0)"] +textract = ["mypy-boto3-textract (>=1.28.0,<1.29.0)"] +timestream-query = ["mypy-boto3-timestream-query (>=1.28.0,<1.29.0)"] +timestream-write = ["mypy-boto3-timestream-write (>=1.28.0,<1.29.0)"] +tnb = ["mypy-boto3-tnb (>=1.28.0,<1.29.0)"] +transcribe = ["mypy-boto3-transcribe (>=1.28.0,<1.29.0)"] +transfer = ["mypy-boto3-transfer (>=1.28.0,<1.29.0)"] +translate = ["mypy-boto3-translate (>=1.28.0,<1.29.0)"] +verifiedpermissions = ["mypy-boto3-verifiedpermissions (>=1.28.0,<1.29.0)"] +voice-id = ["mypy-boto3-voice-id (>=1.28.0,<1.29.0)"] +vpc-lattice = ["mypy-boto3-vpc-lattice (>=1.28.0,<1.29.0)"] +waf = ["mypy-boto3-waf (>=1.28.0,<1.29.0)"] +waf-regional = ["mypy-boto3-waf-regional (>=1.28.0,<1.29.0)"] +wafv2 = 
["mypy-boto3-wafv2 (>=1.28.0,<1.29.0)"] +wellarchitected = ["mypy-boto3-wellarchitected (>=1.28.0,<1.29.0)"] +wisdom = ["mypy-boto3-wisdom (>=1.28.0,<1.29.0)"] +workdocs = ["mypy-boto3-workdocs (>=1.28.0,<1.29.0)"] +worklink = ["mypy-boto3-worklink (>=1.28.0,<1.29.0)"] +workmail = ["mypy-boto3-workmail (>=1.28.0,<1.29.0)"] +workmailmessageflow = ["mypy-boto3-workmailmessageflow (>=1.28.0,<1.29.0)"] +workspaces = ["mypy-boto3-workspaces (>=1.28.0,<1.29.0)"] +workspaces-web = ["mypy-boto3-workspaces-web (>=1.28.0,<1.29.0)"] +xray = ["mypy-boto3-xray (>=1.28.0,<1.29.0)"] + [[package]] name = "botocore" version = "1.31.41" @@ -38,6 +414,20 @@ urllib3 = ">=1.25.4,<1.27" [package.extras] crt = ["awscrt (==0.16.26)"] +[[package]] +name = "botocore-stubs" +version = "1.31.41" +description = "Type annotations and code completion for botocore" +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "botocore_stubs-1.31.41-py3-none-any.whl", hash = "sha256:f28c63e1d3673f87e9b4131794f7fbc4e936cdc2a6c367efabf0149667d9a48a"}, + {file = "botocore_stubs-1.31.41.tar.gz", hash = "sha256:8738438ba52a6d97b3f5491290b72f5165d8be10d2e8f50a473081bb1eb1447a"}, +] + +[package.dependencies] +types-awscrt = "*" + [[package]] name = "cachetools" version = "5.3.1" @@ -316,6 +706,63 @@ files = [ {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, ] +[[package]] +name = "mypy" +version = "1.5.1" +description = "Optional static typing for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "mypy-1.5.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f33592ddf9655a4894aef22d134de7393e95fcbdc2d15c1ab65828eee5c66c70"}, + {file = "mypy-1.5.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:258b22210a4a258ccd077426c7a181d789d1121aca6db73a83f79372f5569ae0"}, + {file = "mypy-1.5.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9ec1f695f0c25986e6f7f8778e5ce61659063268836a38c951200c57479cc12"}, + {file = "mypy-1.5.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:abed92d9c8f08643c7d831300b739562b0a6c9fcb028d211134fc9ab20ccad5d"}, + {file = "mypy-1.5.1-cp310-cp310-win_amd64.whl", hash = "sha256:a156e6390944c265eb56afa67c74c0636f10283429171018446b732f1a05af25"}, + {file = "mypy-1.5.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6ac9c21bfe7bc9f7f1b6fae441746e6a106e48fc9de530dea29e8cd37a2c0cc4"}, + {file = "mypy-1.5.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:51cb1323064b1099e177098cb939eab2da42fea5d818d40113957ec954fc85f4"}, + {file = "mypy-1.5.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:596fae69f2bfcb7305808c75c00f81fe2829b6236eadda536f00610ac5ec2243"}, + {file = "mypy-1.5.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:32cb59609b0534f0bd67faebb6e022fe534bdb0e2ecab4290d683d248be1b275"}, + {file = "mypy-1.5.1-cp311-cp311-win_amd64.whl", hash = "sha256:159aa9acb16086b79bbb0016145034a1a05360626046a929f84579ce1666b315"}, + {file = "mypy-1.5.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f6b0e77db9ff4fda74de7df13f30016a0a663928d669c9f2c057048ba44f09bb"}, + {file = "mypy-1.5.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:26f71b535dfc158a71264e6dc805a9f8d2e60b67215ca0bfa26e2e1aa4d4d373"}, + {file = "mypy-1.5.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fc3a600f749b1008cc75e02b6fb3d4db8dbcca2d733030fe7a3b3502902f161"}, + {file = "mypy-1.5.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:26fb32e4d4afa205b24bf645eddfbb36a1e17e995c5c99d6d00edb24b693406a"}, + {file = "mypy-1.5.1-cp312-cp312-win_amd64.whl", hash = "sha256:82cb6193de9bbb3844bab4c7cf80e6227d5225cc7625b068a06d005d861ad5f1"}, + {file = "mypy-1.5.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4a465ea2ca12804d5b34bb056be3a29dc47aea5973b892d0417c6a10a40b2d65"}, + {file = "mypy-1.5.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9fece120dbb041771a63eb95e4896791386fe287fefb2837258925b8326d6160"}, + {file = "mypy-1.5.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d28ddc3e3dfeab553e743e532fb95b4e6afad51d4706dd22f28e1e5e664828d2"}, + {file = "mypy-1.5.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:57b10c56016adce71fba6bc6e9fd45d8083f74361f629390c556738565af8eeb"}, + {file = "mypy-1.5.1-cp38-cp38-win_amd64.whl", hash = "sha256:ff0cedc84184115202475bbb46dd99f8dcb87fe24d5d0ddfc0fe6b8575c88d2f"}, + {file = "mypy-1.5.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8f772942d372c8cbac575be99f9cc9d9fb3bd95c8bc2de6c01411e2c84ebca8a"}, + {file = "mypy-1.5.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5d627124700b92b6bbaa99f27cbe615c8ea7b3402960f6372ea7d65faf376c14"}, + {file = "mypy-1.5.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:361da43c4f5a96173220eb53340ace68cda81845cd88218f8862dfb0adc8cddb"}, + {file = "mypy-1.5.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:330857f9507c24de5c5724235e66858f8364a0693894342485e543f5b07c8693"}, + {file = "mypy-1.5.1-cp39-cp39-win_amd64.whl", hash = "sha256:c543214ffdd422623e9fedd0869166c2f16affe4ba37463975043ef7d2ea8770"}, + {file = "mypy-1.5.1-py3-none-any.whl", hash = "sha256:f757063a83970d67c444f6e01d9550a7402322af3557ce7630d3c957386fa8f5"}, + {file = "mypy-1.5.1.tar.gz", hash = "sha256:b031b9601f1060bf1281feab89697324726ba0c0bae9d7cd7ab4b690940f0b92"}, +] + +[package.dependencies] +mypy-extensions = ">=1.0.0" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing-extensions = ">=4.1.0" + +[package.extras] +dmypy = ["psutil (>=4.0)"] +install-types = ["pip"] +reports = ["lxml"] + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." 
+optional = false +python-versions = ">=3.5" +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + [[package]] name = "nodeenv" version = "1.8.0" @@ -389,6 +836,69 @@ nodeenv = ">=0.11.1" pyyaml = ">=5.1" virtualenv = ">=20.10.0" +[[package]] +name = "pydantic" +version = "1.10.12" +description = "Data validation and settings management using python type hints" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pydantic-1.10.12-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a1fcb59f2f355ec350073af41d927bf83a63b50e640f4dbaa01053a28b7a7718"}, + {file = "pydantic-1.10.12-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b7ccf02d7eb340b216ec33e53a3a629856afe1c6e0ef91d84a4e6f2fb2ca70fe"}, + {file = "pydantic-1.10.12-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8fb2aa3ab3728d950bcc885a2e9eff6c8fc40bc0b7bb434e555c215491bcf48b"}, + {file = "pydantic-1.10.12-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:771735dc43cf8383959dc9b90aa281f0b6092321ca98677c5fb6125a6f56d58d"}, + {file = "pydantic-1.10.12-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ca48477862372ac3770969b9d75f1bf66131d386dba79506c46d75e6b48c1e09"}, + {file = "pydantic-1.10.12-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a5e7add47a5b5a40c49b3036d464e3c7802f8ae0d1e66035ea16aa5b7a3923ed"}, + {file = "pydantic-1.10.12-cp310-cp310-win_amd64.whl", hash = "sha256:e4129b528c6baa99a429f97ce733fff478ec955513630e61b49804b6cf9b224a"}, + {file = "pydantic-1.10.12-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b0d191db0f92dfcb1dec210ca244fdae5cbe918c6050b342d619c09d31eea0cc"}, + {file = "pydantic-1.10.12-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:795e34e6cc065f8f498c89b894a3c6da294a936ee71e644e4bd44de048af1405"}, + {file = "pydantic-1.10.12-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:69328e15cfda2c392da4e713443c7dbffa1505bc9d566e71e55abe14c97ddc62"}, + {file = "pydantic-1.10.12-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2031de0967c279df0d8a1c72b4ffc411ecd06bac607a212892757db7462fc494"}, + {file = "pydantic-1.10.12-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:ba5b2e6fe6ca2b7e013398bc7d7b170e21cce322d266ffcd57cca313e54fb246"}, + {file = "pydantic-1.10.12-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2a7bac939fa326db1ab741c9d7f44c565a1d1e80908b3797f7f81a4f86bc8d33"}, + {file = "pydantic-1.10.12-cp311-cp311-win_amd64.whl", hash = "sha256:87afda5539d5140cb8ba9e8b8c8865cb5b1463924d38490d73d3ccfd80896b3f"}, + {file = "pydantic-1.10.12-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:549a8e3d81df0a85226963611950b12d2d334f214436a19537b2efed61b7639a"}, + {file = "pydantic-1.10.12-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:598da88dfa127b666852bef6d0d796573a8cf5009ffd62104094a4fe39599565"}, + {file = "pydantic-1.10.12-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba5c4a8552bff16c61882db58544116d021d0b31ee7c66958d14cf386a5b5350"}, + {file = "pydantic-1.10.12-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c79e6a11a07da7374f46970410b41d5e266f7f38f6a17a9c4823db80dadf4303"}, + {file = 
"pydantic-1.10.12-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ab26038b8375581dc832a63c948f261ae0aa21f1d34c1293469f135fa92972a5"}, + {file = "pydantic-1.10.12-cp37-cp37m-win_amd64.whl", hash = "sha256:e0a16d274b588767602b7646fa05af2782576a6cf1022f4ba74cbb4db66f6ca8"}, + {file = "pydantic-1.10.12-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6a9dfa722316f4acf4460afdf5d41d5246a80e249c7ff475c43a3a1e9d75cf62"}, + {file = "pydantic-1.10.12-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a73f489aebd0c2121ed974054cb2759af8a9f747de120acd2c3394cf84176ccb"}, + {file = "pydantic-1.10.12-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b30bcb8cbfccfcf02acb8f1a261143fab622831d9c0989707e0e659f77a18e0"}, + {file = "pydantic-1.10.12-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2fcfb5296d7877af406ba1547dfde9943b1256d8928732267e2653c26938cd9c"}, + {file = "pydantic-1.10.12-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:2f9a6fab5f82ada41d56b0602606a5506aab165ca54e52bc4545028382ef1c5d"}, + {file = "pydantic-1.10.12-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:dea7adcc33d5d105896401a1f37d56b47d443a2b2605ff8a969a0ed5543f7e33"}, + {file = "pydantic-1.10.12-cp38-cp38-win_amd64.whl", hash = "sha256:1eb2085c13bce1612da8537b2d90f549c8cbb05c67e8f22854e201bde5d98a47"}, + {file = "pydantic-1.10.12-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ef6c96b2baa2100ec91a4b428f80d8f28a3c9e53568219b6c298c1125572ebc6"}, + {file = "pydantic-1.10.12-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6c076be61cd0177a8433c0adcb03475baf4ee91edf5a4e550161ad57fc90f523"}, + {file = "pydantic-1.10.12-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d5a58feb9a39f481eda4d5ca220aa8b9d4f21a41274760b9bc66bfd72595b86"}, + {file = "pydantic-1.10.12-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5f805d2d5d0a41633651a73fa4ecdd0b3d7a49de4ec3fadf062fe16501ddbf1"}, + {file = "pydantic-1.10.12-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:1289c180abd4bd4555bb927c42ee42abc3aee02b0fb2d1223fb7c6e5bef87dbe"}, + {file = "pydantic-1.10.12-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5d1197e462e0364906cbc19681605cb7c036f2475c899b6f296104ad42b9f5fb"}, + {file = "pydantic-1.10.12-cp39-cp39-win_amd64.whl", hash = "sha256:fdbdd1d630195689f325c9ef1a12900524dceb503b00a987663ff4f58669b93d"}, + {file = "pydantic-1.10.12-py3-none-any.whl", hash = "sha256:b749a43aa51e32839c9d71dc67eb1e4221bb04af1033a32e3923d46f9effa942"}, + {file = "pydantic-1.10.12.tar.gz", hash = "sha256:0fe8a415cea8f340e7a9af9c54fc71a649b43e8ca3cc732986116b3cb135d303"}, +] + +[package.dependencies] +typing-extensions = ">=4.2.0" + +[package.extras] +dotenv = ["python-dotenv (>=0.10.4)"] +email = ["email-validator (>=1.0.3)"] + +[[package]] +name = "pyfakefs" +version = "5.2.4" +description = "pyfakefs implements a fake file system that mocks the Python file system modules." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "pyfakefs-5.2.4-py3-none-any.whl", hash = "sha256:8eb95f1dd1c4b8bdce30448fe169875e3a4451c32d3f9c37799157bd4eb7b789"}, + {file = "pyfakefs-5.2.4.tar.gz", hash = "sha256:3e040f3792086086a0dc2191b05fe709438e168aafe2e94fcdbef8e3859208d8"}, +] + [[package]] name = "pyproject-api" version = "1.6.1" @@ -430,6 +940,20 @@ tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} [package.extras] testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] +[[package]] +name = "pytest-timeout" +version = "2.1.0" +description = "pytest plugin to abort hanging tests" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pytest-timeout-2.1.0.tar.gz", hash = "sha256:c07ca07404c612f8abbe22294b23c368e2e5104b521c1790195561f37e1ac3d9"}, + {file = "pytest_timeout-2.1.0-py3-none-any.whl", hash = "sha256:f6f50101443ce70ad325ceb4473c4255e9d74e3c7cd0ef827309dfa4c0d975c6"}, +] + +[package.dependencies] +pytest = ">=5.0.0" + [[package]] name = "python-dateutil" version = "2.8.2" @@ -547,6 +1071,25 @@ urllib3 = ">=1.21.1,<3" socks = ["PySocks (>=1.5.6,!=1.5.7)"] use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] +[[package]] +name = "requests-mock" +version = "1.11.0" +description = "Mock out responses from the requests package" +optional = false +python-versions = "*" +files = [ + {file = "requests-mock-1.11.0.tar.gz", hash = "sha256:ef10b572b489a5f28e09b708697208c4a3b2b89ef80a9f01584340ea357ec3c4"}, + {file = "requests_mock-1.11.0-py2.py3-none-any.whl", hash = "sha256:f7fae383f228633f6bececebdab236c478ace2284d6292c6e7e2867b9ab74d15"}, +] + +[package.dependencies] +requests = ">=2.3,<3" +six = "*" + +[package.extras] +fixture = ["fixtures"] +test = ["fixtures", "mock", "purl", "pytest", "requests-futures", "sphinx", "testtools"] + [[package]] name = "s3transfer" version = "0.6.2" @@ -662,6 +1205,50 @@ tox = ">=4,<5" [package.extras] testing = ["black", "devpi-process", "flake8 (>=6,<7)", "mypy", "pytest (>=7,<8)", "pytest-cov (>=3,<4)", "pytest-mock (>=3,<4)", "pytest-randomly (>=3)"] +[[package]] +name = "types-awscrt" +version = "0.19.1" +description = "Type annotations and code completion for awscrt" +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "types_awscrt-0.19.1-py3-none-any.whl", hash = "sha256:68fffeb75396e9e7614cd930b2d52295f680230774750907bcafb56f11514043"}, + {file = "types_awscrt-0.19.1.tar.gz", hash = "sha256:61833aa140e724a9098025610f4b8cde3dcf65b842631d7447378f9f5db4e1fd"}, +] + +[[package]] +name = "types-python-dateutil" +version = "2.8.19.14" +description = "Typing stubs for python-dateutil" +optional = false +python-versions = "*" +files = [ + {file = "types-python-dateutil-2.8.19.14.tar.gz", hash = "sha256:1f4f10ac98bb8b16ade9dbee3518d9ace017821d94b057a425b069f834737f4b"}, + {file = "types_python_dateutil-2.8.19.14-py3-none-any.whl", hash = "sha256:f977b8de27787639986b4e28963263fd0e5158942b3ecef91b9335c130cb1ce9"}, +] + +[[package]] +name = "types-pytz" +version = "2023.3.0.1" +description = "Typing stubs for pytz" +optional = false +python-versions = "*" +files = [ + {file = "types-pytz-2023.3.0.1.tar.gz", hash = "sha256:1a7b8d4aac70981cfa24478a41eadfcd96a087c986d6f150d77e3ceb3c2bdfab"}, + {file = "types_pytz-2023.3.0.1-py3-none-any.whl", hash = "sha256:65152e872137926bb67a8fe6cc9cfd794365df86650c5d5fdc7b167b0f38892e"}, +] + +[[package]] +name = "types-s3transfer" +version = "0.6.2" +description = 
"Type annotations and code completion for s3transfer" +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "types_s3transfer-0.6.2-py3-none-any.whl", hash = "sha256:1068877b6e59be5226fa3006ae64371ac9d5bc590dfdbd9c66fd0a075d3254ac"}, + {file = "types_s3transfer-0.6.2.tar.gz", hash = "sha256:4ba9b483796fdcd026aa162ee03bdcedd2bf7d08e9387c820dcdd158b0102057"}, +] + [[package]] name = "typing-extensions" version = "4.7.1" @@ -728,4 +1315,4 @@ test = ["websockets"] [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "19b3c796b95ba91307a777f997262bffef23f4484ddc3bb812ecf15a9631346e" +content-hash = "cb76b2c69b81689d426d379e4ff07b55ed5ac5b70cfdce9b1380622f6e95cc0f" diff --git a/pyproject.toml b/pyproject.toml index 4f4cf78..47cee0c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -19,7 +19,40 @@ python = "^3.9" boto3 = "^1.18" botocore = "^1.21" click = "^8.1.3" -pytest= ">=7.2.0" +pydantic = "^1.10.9" + + +[tool.poetry.group.dev.dependencies] +boto3-stubs = "^1.26.81" +botocore-stubs = "^1.29.81" +mypy = "^1.4.1" +pyfakefs = "~5.2.0" +pytest = ">=7.2.0" +pytest-timeout = "*" +requests-mock = "1.11.0" +types-python-dateutil = "^2.8.19" +types-pytz = "^2023.2" + +[tool.isort] +known_first_party = ["core" ] +profile = "black" + +[tool.mypy] +check_untyped_defs = true +disable_error_code = "annotation-unchecked" +exclude = [] +files = ["."] +plugins = ["pydantic.mypy",] + + +[[tool.mypy.overrides]] +# In our tests, we often overwrite methods on classes to mock out behavior. +# This is a common pattern in Python, but mypy doesn't like it. This override +# silences those errors, but only for the tests module. +# See discussion here: +# https://github.com/python/mypy/issues/2427 +disable_error_code = "method-assign" +module = "tests.*" [build-system] diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/core/__init__.py b/tests/core/__init__.py new file mode 100644 index 0000000..e69de29 From 3915ea7a12e094f2f398a9c80dd2c4064c978d0f Mon Sep 17 00:00:00 2001 From: Daniel Bernstein Date: Thu, 7 Sep 2023 11:37:11 -0700 Subject: [PATCH 20/30] Add prereq and installation sections to README.md. --- README.md | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/README.md b/README.md index 36669c3..9bc4753 100644 --- a/README.md +++ b/README.md @@ -3,6 +3,17 @@ A suite of command line operations for exporting and importing quicksight dashboards from and to AWS accounts. Exported resources can be found [here](https://github.com/ThePalaceProject/palace-quicksight-resources). +## Prerequisites + +Install Poetry: [Installation instructions here](https://python-poetry.org/docs/). + +## Installation + +```shell +poetry install +poetry run ./bin/palace-quicksight --help +``` + ## Usage ```shell From d97004dfbdf7c53490ddddefc942506e90da4c89 Mon Sep 17 00:00:00 2001 From: Daniel Bernstein Date: Tue, 19 Sep 2023 13:16:01 -0700 Subject: [PATCH 21/30] Add import test. 
--- .../assets/data-sets/circulation_view.json | 48 ++++ .../assets/data-sets/patron_events.json | 48 ++++ .../resources/assets/templates/library.json | 40 ++++ .../test_export_analysis_operation.py | 1 + .../test_import_template_operation.py | 219 ++++++++++++++++++ 5 files changed, 356 insertions(+) create mode 100644 tests/core/operation/resources/assets/data-sets/circulation_view.json create mode 100644 tests/core/operation/resources/assets/data-sets/patron_events.json create mode 100644 tests/core/operation/resources/assets/templates/library.json create mode 100644 tests/core/operation/test_import_template_operation.py diff --git a/tests/core/operation/resources/assets/data-sets/circulation_view.json b/tests/core/operation/resources/assets/data-sets/circulation_view.json new file mode 100644 index 0000000..3f8bc88 --- /dev/null +++ b/tests/core/operation/resources/assets/data-sets/circulation_view.json @@ -0,0 +1,48 @@ +{ + "Name": "circulation_view", + "PhysicalTableMap": { + "25046cd8-e08f-41e0-8af8-5259b64499fd": { + "CustomSql": { + "DataSourceArn": "", + "Name": "circulation_view", + "SqlQuery": "sql query", + "Columns": [ + { + "Name": "time_stamp", + "Type": "DATETIME" + } + ] + } + } + }, + "LogicalTableMap": { + "6c80275e-d03d-417c-a8cd-57d93e58129b": { + "Alias": "circulation_view", + "DataTransforms": [ + { + "ProjectOperation": { + "ProjectedColumns": [ + "time_stamp" + ] + } + } + ], + "Source": { + "PhysicalTableId": "25046cd8-e08f-41e0-8af8-5259b64499fd" + } + } + }, + "OutputColumns": [ + { + "Name": "time_stamp", + "Type": "DATETIME" + } + ], + "ImportMode": "DIRECT_QUERY", + "ConsumedSpiceCapacityInBytes": 0, + "FieldFolders": {}, + "DataSetUsageConfiguration": { + "DisableUseAsDirectQuerySource": false, + "DisableUseAsImportedSource": false + } +} diff --git a/tests/core/operation/resources/assets/data-sets/patron_events.json b/tests/core/operation/resources/assets/data-sets/patron_events.json new file mode 100644 index 0000000..a594b6c --- /dev/null +++ b/tests/core/operation/resources/assets/data-sets/patron_events.json @@ -0,0 +1,48 @@ +{ + "Name": "patron_events", + "PhysicalTableMap": { + "50873ea6-0c3a-4989-97e1-eb740e8a3348": { + "CustomSql": { + "DataSourceArn": "", + "Name": "patron_events", + "SqlQuery": "sql query", + "Columns": [ + { + "Name": "time_stamp", + "Type": "DATETIME" + } + ] + } + } + }, + "LogicalTableMap": { + "4dc4e51c-76b2-4595-8b3b-1759f76a05c4": { + "Alias": "patron_events", + "DataTransforms": [ + { + "ProjectOperation": { + "ProjectedColumns": [ + "time_stamp" + ] + } + } + ], + "Source": { + "PhysicalTableId": "50873ea6-0c3a-4989-97e1-eb740e8a3348" + } + } + }, + "OutputColumns": [ + { + "Name": "time_stamp", + "Type": "DATETIME" + } + ], + "ImportMode": "DIRECT_QUERY", + "ConsumedSpiceCapacityInBytes": 0, + "FieldFolders": {}, + "DataSetUsageConfiguration": { + "DisableUseAsDirectQuerySource": false, + "DisableUseAsImportedSource": false + } +} diff --git a/tests/core/operation/resources/assets/templates/library.json b/tests/core/operation/resources/assets/templates/library.json new file mode 100644 index 0000000..6acb96b --- /dev/null +++ b/tests/core/operation/resources/assets/templates/library.json @@ -0,0 +1,40 @@ +{ + "Name": "library", + "Definition": { + "DataSetConfigurations": [ + { + "Placeholder": "circulation_view", + "DataSetSchema": { + "ColumnSchemaList": [ + ] + }, + "ColumnGroupSchemaList": [] + }, + { + "Placeholder": "patron_events", + "DataSetSchema": { + "ColumnSchemaList": [ + ] + }, + "ColumnGroupSchemaList": 
[] + } + ], + "Sheets": [], + "AnalysisDefaults": { + "DefaultNewSheetConfiguration": { + "InteractiveLayoutConfiguration": { + "Grid": { + "CanvasSizeOptions": { + "ScreenCanvasSizeOptions": { + "ResizeOption": "FIXED", + "OptimizedViewPortWidth": "1600px" + } + } + } + }, + "SheetContentType": "INTERACTIVE" + } + } + }, + "TemplateId": "library-template" +} diff --git a/tests/core/operation/test_export_analysis_operation.py b/tests/core/operation/test_export_analysis_operation.py index e2278b0..f19a2cc 100644 --- a/tests/core/operation/test_export_analysis_operation.py +++ b/tests/core/operation/test_export_analysis_operation.py @@ -28,6 +28,7 @@ def test(self): qs_client = botocore.session.get_session().create_client( "quicksight", config=boto_config ) + with Stubber(qs_client) as stub: analysis_description_params = { "AwsAccountId": account, diff --git a/tests/core/operation/test_import_template_operation.py b/tests/core/operation/test_import_template_operation.py new file mode 100644 index 0000000..030bdc2 --- /dev/null +++ b/tests/core/operation/test_import_template_operation.py @@ -0,0 +1,219 @@ +import botocore +from botocore.config import Config +from botocore.stub import Stubber + +from core.operation.import_from_json_operation import ImportFromJsonOperation + + +def create_data_set_response(target_namespace, data_set_name): + new_ds_name = f"{target_namespace}-{data_set_name}" + response = { + "ResponseMetadata": { + "RequestId": "3aecd4ed-9a15-408a-a251-532718e574bd", + "HTTPStatusCode": 200, + "HTTPHeaders": { + "date": "Tue, 19 Sep 2023 17:54:43 GMT", + "content-type": "application/json", + "content-length": "215", + "connection": "keep-alive", + "x-amzn-requestid": "3aecd4ed-9a15-408a-a251-532718e574bd", + }, + "RetryAttempts": 0, + }, + "Status": 200, + "Arn": f"arn:aws:quicksight:us-west-2:128682227026:dataset/{new_ds_name}", + "DataSetId": new_ds_name, + "RequestId": "3aecd4ed-9a15-408a-a251-532718e574bd", + } + return response + + +def create_data_set_params1( + input_dir, target_namespace, data_source_arn, aws_account_id +): + data_set_params = { + "Name": "circulation_view", + "PhysicalTableMap": { + "25046cd8-e08f-41e0-8af8-5259b64499fd": { + "CustomSql": { + "DataSourceArn": data_source_arn, + "Name": "circulation_view", + "SqlQuery": "sql query", + "Columns": [{"Name": "time_stamp", "Type": "DATETIME"}], + } + } + }, + "LogicalTableMap": { + "6c80275e-d03d-417c-a8cd-57d93e58129b": { + "Alias": "circulation_view", + "DataTransforms": [ + {"ProjectOperation": {"ProjectedColumns": ["time_stamp"]}} + ], + "Source": {"PhysicalTableId": "25046cd8-e08f-41e0-8af8-5259b64499fd"}, + } + }, + "ImportMode": "DIRECT_QUERY", + "FieldFolders": {}, + "DataSetUsageConfiguration": { + "DisableUseAsDirectQuerySource": False, + "DisableUseAsImportedSource": False, + }, + "AwsAccountId": aws_account_id, + "DataSetId": f"{target_namespace}-circulation_view", + } + return data_set_params + + +def create_data_set_params2( + input_dir, target_namespace, data_source_arn, aws_account_id +): + data_set_params = { + "Name": "patron_events", + "PhysicalTableMap": { + "50873ea6-0c3a-4989-97e1-eb740e8a3348": { + "CustomSql": { + "DataSourceArn": data_source_arn, + "Name": "patron_events", + "SqlQuery": "sql query", + "Columns": [{"Name": "time_stamp", "Type": "DATETIME"}], + } + } + }, + "LogicalTableMap": { + "4dc4e51c-76b2-4595-8b3b-1759f76a05c4": { + "Alias": "patron_events", + "DataTransforms": [ + {"ProjectOperation": {"ProjectedColumns": ["time_stamp"]}} + ], + "Source": {"PhysicalTableId": 
"50873ea6-0c3a-4989-97e1-eb740e8a3348"}, + } + }, + "ImportMode": "DIRECT_QUERY", + "FieldFolders": {}, + "DataSetUsageConfiguration": { + "DisableUseAsDirectQuerySource": False, + "DisableUseAsImportedSource": False, + }, + "AwsAccountId": aws_account_id, + "DataSetId": f"{target_namespace}-patron_events", + } + return data_set_params + + +def create_template_params(target_namespace, aws_account_id): + return { + "Name": f"{target_namespace}-library", + "TemplateId": f"{target_namespace}-library", + "AwsAccountId": aws_account_id, + "Definition": { + "DataSetConfigurations": [ + { + "Placeholder": "circulation_view", + "DataSetSchema": {"ColumnSchemaList": []}, + "ColumnGroupSchemaList": [], + }, + { + "Placeholder": "patron_events", + "DataSetSchema": {"ColumnSchemaList": []}, + "ColumnGroupSchemaList": [], + }, + ], + "Sheets": [], + "AnalysisDefaults": { + "DefaultNewSheetConfiguration": { + "InteractiveLayoutConfiguration": { + "Grid": { + "CanvasSizeOptions": { + "ScreenCanvasSizeOptions": { + "ResizeOption": "FIXED", + "OptimizedViewPortWidth": "1600px", + } + } + } + }, + "SheetContentType": "INTERACTIVE", + } + }, + }, + } + + return + + +def create_template_response(new_template_name): + return { + "ResponseMetadata": { + "RequestId": "09f0120c-92c9-4f16-8044-dc44b715f6db", + "HTTPStatusCode": 202, + "HTTPHeaders": { + "date": "Tue, 19 Sep 2023 17:32:44 GMT", + "content-type": "application/json", + "content-length": "293", + "connection": "keep-alive", + "x-amzn-requestid": "09f0120c-92c9-4f16-8044-dc44b715f6db", + }, + "RetryAttempts": 0, + }, + "Status": 202, + "TemplateId": new_template_name, + "Arn": f"arn:aws:quicksight:us-west-2:128682227026:template/{new_template_name}", + "VersionArn": f"arn:aws:quicksight:us-west-2:128682227026:template/{new_template_name}/version/4", + "CreationStatus": "CREATION_IN_PROGRESS", + "RequestId": "09f0120c-92c9-4f16-8044-dc44b715f6db", + } + + +class TestImportTemplateOperation: + def test(self): + template_name = "library" + input_dir = "tests/core/operation/resources" + account = "012345678910" + target_namespace = "my_env" + data_source_arn = "my_data_source_arn" + + boto_config = Config( + region_name="us-east-1", + ) + + new_template_name = target_namespace + "-" + template_name + + qs_client = botocore.session.get_session().create_client( + "quicksight", config=boto_config + ) + with Stubber(qs_client) as stub: + stub.add_response( + "create_template", + service_response=create_template_response(new_template_name), + expected_params=create_template_params(target_namespace, account), + ) + + stub.add_response( + "create_data_set", + service_response=create_data_set_response( + target_namespace, "circulation_view" + ), + expected_params=create_data_set_params1( + input_dir, target_namespace, data_source_arn, account + ), + ) + + stub.add_response( + "create_data_set", + service_response=create_data_set_response( + target_namespace, "patron_events" + ), + expected_params=create_data_set_params2( + input_dir, target_namespace, data_source_arn, account + ), + ) + + op = ImportFromJsonOperation( + qs_client=qs_client, + template_name=template_name, + target_namespace=target_namespace, + input_dir=input_dir, + aws_account_id=account, + data_source_arn=data_source_arn, + ) + + op.execute() From 6a7335acb60a8b36e4ac22a531bf92c4f23373ca Mon Sep 17 00:00:00 2001 From: Daniel Bernstein Date: Tue, 19 Sep 2023 15:05:46 -0700 Subject: [PATCH 22/30] Add publish test. 
--- .../publish_dashboard_from_template.py | 10 +- .../core/operation/test_publish_operation.py | 262 ++++++++++++++++++ 2 files changed, 271 insertions(+), 1 deletion(-) create mode 100644 tests/core/operation/test_publish_operation.py diff --git a/core/operation/publish_dashboard_from_template.py b/core/operation/publish_dashboard_from_template.py index ed4f1f8..3b30b0e 100644 --- a/core/operation/publish_dashboard_from_template.py +++ b/core/operation/publish_dashboard_from_template.py @@ -10,7 +10,7 @@ class PublishDashboardFromTemplateOperation(BaseOperation): """ def __init__( - self, template_id: str, target_namespace: str, group_name, *args, **kwargs + self, template_id: str, target_namespace: str, group_name: str, *args, **kwargs ): self._template_id = template_id self._target_namespace = target_namespace @@ -106,6 +106,14 @@ def execute(self): } response = self._qs_client.update_dashboard_permissions(**permissions_params) + httpStatus = response["ResponseMetadata"]["HTTPStatusCode"] + if httpStatus != 202 and httpStatus != 200: + self._log.error( + f"Unexpected response from update_dashboard_permissions request: {httpStatus} " + ) + raise Exception( + f"Unexpected response from trying to update_dashboard_permissions : {json.dumps(response, indent=4)} " + ) def _create_or_update_dashboard(self, dashboard_params: dict) -> tuple[str, str]: """ diff --git a/tests/core/operation/test_publish_operation.py b/tests/core/operation/test_publish_operation.py new file mode 100644 index 0000000..32e8b0d --- /dev/null +++ b/tests/core/operation/test_publish_operation.py @@ -0,0 +1,262 @@ +from datetime import datetime + +import botocore +from botocore.config import Config +from botocore.stub import Stubber +from dateutil.tz import tzlocal + +from core.operation.publish_dashboard_from_template import ( + PublishDashboardFromTemplateOperation, +) + + +class TestPublishDashboardFromTemplateOperation: + def test(self): + target_namespace = "my_env" + template_id = f"{target_namespace}-library" + account = "012345678910" + group_name = "my_group" + + boto_config = Config( + region_name="us-east-1", + ) + + qs_client = botocore.session.get_session().create_client( + "quicksight", config=boto_config + ) + + describe_template_definition_params = { + "AwsAccountId": account, + "TemplateId": template_id, + "AliasName": "$LATEST", + } + with Stubber(qs_client) as stub: + template_arn = f"arn:aws:quicksight:::template/{target_namespace}-library" + + stub.add_response( + "describe_template_definition", + expected_params=describe_template_definition_params, + service_response={ + "ResponseMetadata": { + "RequestId": "f89ac94e-34e1-4e45-9b12-c56af317e195", + "HTTPStatusCode": 200, + "HTTPHeaders": { + "date": "Tue, 19 Sep 2023 21:58:09 GMT", + "content-type": "application/json", + "content-length": "172398", + "connection": "keep-alive", + "x-amzn-requestid": "f89ac94e-34e1-4e45-9b12-c56af317e195", + }, + "RetryAttempts": 0, + }, + "Status": 200, + "Name": "tpp-prod-library", + "TemplateId": f"{target_namespace}-library", + "ResourceStatus": "CREATION_SUCCESSFUL", + "Definition": { + "DataSetConfigurations": [ + { + "Placeholder": "circulation_view", + "DataSetSchema": {"ColumnSchemaList": []}, + "ColumnGroupSchemaList": [], + }, + { + "Placeholder": "patron_events", + "DataSetSchema": {"ColumnSchemaList": []}, + "ColumnGroupSchemaList": [], + }, + ], + "Sheets": [], + "AnalysisDefaults": { + "DefaultNewSheetConfiguration": { + "InteractiveLayoutConfiguration": { + "Grid": { + "CanvasSizeOptions": { + 
"ScreenCanvasSizeOptions": { + "ResizeOption": "FIXED", + "OptimizedViewPortWidth": "1600px", + } + } + } + }, + "SheetContentType": "INTERACTIVE", + } + }, + }, + "RequestId": "f89ac94e-34e1-4e45-9b12-c56af317e195", + }, + ) + stub.add_response( + "describe_template", + service_response={ + "Template": { + "Arn": template_arn, + "Name": "tpp-prod-library", + "Version": { + "CreatedTime": datetime( + 2023, 9, 19, 11, 58, 37, 288000, tzinfo=tzlocal() + ), + "VersionNumber": 5, + "Status": "CREATION_SUCCESSFUL", + "DataSetConfigurations": [ + { + "Placeholder": "circulation_view", + "DataSetSchema": {"ColumnSchemaList": []}, + "ColumnGroupSchemaList": [], + }, + { + "Placeholder": "patron_events", + "DataSetSchema": {"ColumnSchemaList": []}, + "ColumnGroupSchemaList": [], + }, + ], + "Sheets": [], + }, + "TemplateId": "tpp-prod-library", + "LastUpdatedTime": datetime( + 2023, 9, 19, 11, 58, 37, 284000, tzinfo=tzlocal() + ), + "CreatedTime": datetime( + 2023, 9, 1, 12, 50, 43, 593000, tzinfo=tzlocal() + ), + } + }, + expected_params={"AwsAccountId": account, "TemplateId": template_id}, + ) + + namespace_arn = "arn:quicksight:::namespace/default" + + stub.add_response( + "describe_namespace", + service_response={"Namespace": {"Arn": namespace_arn}}, + expected_params={ + "AwsAccountId": account, + "Namespace": "default", + }, + ) + + ds1_arn = ( + f"arn:aws:quicksight:::dataset/{target_namespace}-circulation_view" + ) + ds2_arn = f"arn:aws:quicksight:::dataset/{target_namespace}-patron_events" + + stub.add_response( + "describe_data_set", + service_response={"DataSet": {"Arn": ds1_arn}}, + expected_params={ + "AwsAccountId": account, + "DataSetId": f"{target_namespace}-circulation_view", + }, + ) + stub.add_response( + "describe_data_set", + service_response={"DataSet": {"Arn": ds2_arn}}, + expected_params={ + "AwsAccountId": account, + "DataSetId": f"{target_namespace}-patron_events", + }, + ) + + stub.add_response( + "create_dashboard", + service_response={ + "ResponseMetadata": { + "RequestId": "7d276ede-baa4-4662-abb1-a917489c9e96", + "HTTPStatusCode": 200, + "HTTPHeaders": { + "date": "Tue, 19 Sep 2023 21:49:00 GMT", + "content-type": "application/json", + "content-length": "309", + "connection": "keep-alive", + "x-amzn-requestid": "7d276ede-baa4-4662-abb1-a917489c9e96", + }, + "RetryAttempts": 0, + }, + "Arn": "arn:aws:quicksight:us-west-2:128682227026:dashboard/tpp-prod-library", + "VersionArn": f"arn:aws:quicksight:::dashboard/{target_namespace}-library/version/6", + "DashboardId": "tpp-prod-library", + "CreationStatus": "CREATION_IN_PROGRESS", + "Status": 202, + "RequestId": "7d276ede-baa4-4662-abb1-a917489c9e96", + }, + expected_params={ + "AwsAccountId": account, + "Name": template_id, + "DashboardId": template_id, + "SourceEntity": { + "SourceTemplate": { + "DataSetReferences": [ + { + "DataSetPlaceholder": "circulation_view", + f"DataSetArn": ds1_arn, + }, + { + "DataSetPlaceholder": "patron_events", + "DataSetArn": ds2_arn, + }, + ], + "Arn": template_arn, + } + }, + }, + ) + group_arn = f"arn:aws:quicksight:::group/{group_name}" + stub.add_response( + "describe_group", + service_response={"Group": {"Arn": group_arn}}, + expected_params={ + "AwsAccountId": account, + "Namespace": "default", + "GroupName": group_name, + }, + ) + + stub.add_response( + "update_dashboard_permissions", + service_response={ + "ResponseMetadata": { + "RequestId": "14cfee14-421d-4ce1-91ea-e5aef9a0c0ca", + "HTTPStatusCode": 200, + "HTTPHeaders": { + "date": "Tue, 19 Sep 2023 21:32:57 GMT", + 
"content-type": "application/json", + "content-length": "633", + "connection": "keep-alive", + "x-amzn-requestid": "14cfee14-421d-4ce1-91ea-e5aef9a0c0ca", + }, + "RetryAttempts": 0, + }, + "Status": 200, + }, + expected_params={ + "AwsAccountId": account, + "DashboardId": template_id, + "GrantPermissions": [ + { + "Actions": [ + "quicksight:DescribeDashboard", + "quicksight:ListDashboardVersions", + "quicksight:QueryDashboard", + ], + "Principal": namespace_arn, + }, + { + "Actions": [ + "quicksight:DescribeDashboard", + "quicksight:ListDashboardVersions", + "quicksight:QueryDashboard", + ], + "Principal": group_arn, + }, + ], + }, + ) + op = PublishDashboardFromTemplateOperation( + qs_client=qs_client, + template_id=template_id, + target_namespace=target_namespace, + aws_account_id=account, + group_name=group_name, + ) + + op.execute() From 448f42cb0a170006d603abd9538e7b344b9b0b99 Mon Sep 17 00:00:00 2001 From: Daniel Bernstein Date: Tue, 19 Sep 2023 15:46:06 -0700 Subject: [PATCH 23/30] Improve output for cli operations. --- core/cli.py | 9 ++- core/operation/baseoperation.py | 2 +- core/operation/export_analysis_operation.py | 18 +++-- core/operation/import_from_json_operation.py | 28 ++++--- .../publish_dashboard_from_template.py | 15 ++-- .../test_export_analysis_operation.py | 4 +- .../test_import_template_operation.py | 3 +- .../core/operation/test_publish_operation.py | 76 ++----------------- 8 files changed, 51 insertions(+), 104 deletions(-) diff --git a/core/cli.py b/core/cli.py index ff78cdf..666c366 100644 --- a/core/cli.py +++ b/core/cli.py @@ -44,12 +44,13 @@ def export_analysis( click.echo(f"analysis_id= {analysis_id}") click.echo(f"aws_account_id= {aws_account_id}") click.echo(f"output_dir= {output_dir}") - ExportAnalysisOperation( + result = ExportAnalysisOperation( qs_client=create_quicksight_client(aws_profile=aws_profile), aws_account_id=aws_account_id, analysis_id=analysis_id, output_dir=output_dir, ).execute() + click.echo(result) cli.add_command(export_analysis) @@ -95,7 +96,7 @@ def import_template( click.echo(f"data_source_arn = {data_source_arn}") click.echo(f"input_dir= {input_dir}") - ImportFromJsonOperation( + result = ImportFromJsonOperation( qs_client=create_quicksight_client(aws_profile), aws_account_id=aws_account_id, template_name=template_name, @@ -103,6 +104,7 @@ def import_template( data_source_arn=data_source_arn, input_dir=input_dir, ).execute() + click.echo(result) cli.add_command(import_template) @@ -136,13 +138,14 @@ def publish_dashboard( click.echo(f"aws_account_id = {aws_account_id}") click.echo(f"template_id = {template_id}") click.echo(f"group_name = {group_name}") - PublishDashboardFromTemplateOperation( + result = PublishDashboardFromTemplateOperation( qs_client=create_quicksight_client(aws_profile), aws_account_id=aws_account_id, template_id=template_id, target_namespace=target_namespace, group_name=group_name, ).execute() + click.echo(result) cli.add_command(publish_dashboard) diff --git a/core/operation/baseoperation.py b/core/operation/baseoperation.py index 7078666..f996231 100644 --- a/core/operation/baseoperation.py +++ b/core/operation/baseoperation.py @@ -20,7 +20,7 @@ def __init__(self, qs_client, aws_account_id: str): self._log = logging.getLogger(self.__class__.__name__) @abstractmethod - def execute(self): + def execute(self) -> dict: pass def _create_or_update_template(self, template_data: dict) -> tuple[str, str, str]: diff --git a/core/operation/export_analysis_operation.py b/core/operation/export_analysis_operation.py index 
ab3bde3..c2be173 100644 --- a/core/operation/export_analysis_operation.py +++ b/core/operation/export_analysis_operation.py @@ -16,7 +16,7 @@ def __init__(self, analysis_id: str, output_dir: str, *args, **kwargs): self._output_dir = output_dir super().__init__(*args, **kwargs) - def execute(self): + def execute(self) -> dict: os.makedirs(self._resolve_path(self._output_dir, TEMPLATE_DIR), exist_ok=True) os.makedirs(self._resolve_path(self._output_dir, DATA_SET_DIR), exist_ok=True) @@ -29,7 +29,7 @@ def execute(self): if https_status != 200: self._log.error( - f"Unexpected response from describe_analysis request: {https_status} " + f"Unexpected response from describe_analysis request: {https_status}" ) return @@ -77,13 +77,13 @@ def verify_success() -> bool: # save the template as json file definition_json_str = json.dumps(map_to_save, indent=4) - template_file = self._resolve_path( + template_file_path = self._resolve_path( self._output_dir, TEMPLATE_DIR, self._template_definition["Name"] + ".json" ) - with open(template_file, "w") as template_file: + with open(template_file_path, "w") as template_file: template_file.write(definition_json_str) - files_to_update.append(template_file) + files_to_update.append(template_file_path) # for each dataset declaration identifiers for di in data_set_identifier_declarations: @@ -91,6 +91,8 @@ def verify_success() -> bool: ds_file = self._save_dataset_to_file(di=di) files_to_update.append(ds_file) + return {"status": "success", "files_exported": files_to_update} + def _create_or_update_template_from_analysis( self, analysis, data_set_references: List ): @@ -128,11 +130,11 @@ def _save_dataset_to_file(self, di) -> str: recursively_replace_value(ds_def_elements_to_save, "DataSourceArn", "") # save what is left to disk ds_def_str = json.dumps(ds_def_elements_to_save, indent=4) - dataset_file = self._resolve_path( + dataset_file_path = self._resolve_path( self._output_dir, DATA_SET_DIR, identifier + ".json" ) - with open(dataset_file, "w") as dataset_file: + with open(dataset_file_path, "w") as dataset_file: dataset_file.write(ds_def_str) - return dataset_file + return dataset_file_path diff --git a/core/operation/import_from_json_operation.py b/core/operation/import_from_json_operation.py index 2a2c597..bed9453 100644 --- a/core/operation/import_from_json_operation.py +++ b/core/operation/import_from_json_operation.py @@ -24,7 +24,7 @@ def __init__( self._intput_dir = input_dir super().__init__(*args, **kwargs) - def execute(self): + def execute(self) -> dict: # Read template file into dictionary template_data = None template_file = self._resolve_path( @@ -33,16 +33,6 @@ def execute(self): with open(template_file) as template_file: template_data = json.loads(template_file.read()) - # create namespace if not exists - # try: - # self._qs_client.create_namespace(AwsAccountId=self._aws_account_id, Namespace=self._target_namespace, - # IdentityStore="QUICKSIGHT") - # except self._qs_client.exceptions.ConflictException as e: - # self._log.info(f"Namespace {self._target_namespace} already exists: ignoring.") - # - # namespace = self._qs_client.describe_namespace(AwsAccountId=self._aws_account_id, - # Namespace=self._target_namespace) - # create name template in namespace template_data["Name"] = self._target_namespace + "-" + self._template_name template_data["TemplateId"] = template_data["Name"] @@ -56,6 +46,7 @@ def execute(self): # for each data set id associated with the template dataset_configurations = template_data["Definition"]["DataSetConfigurations"] + 
data_sets_created = [] for di in dataset_configurations: # Read data set into dictionary dataset = None @@ -78,10 +69,23 @@ def execute(self): placeholder=placeholder, namespace=self._target_namespace ) dataset["Name"] = dataset["Name"] - arn, data_set_id = self._create_or_update_data_set( + ds_arn, data_set_id = self._create_or_update_data_set( dataset_definition=dataset ) + data_sets_created.append( + { + "id": data_set_id, + "arn": ds_arn, + } + ) + + return { + "status": "success", + "data_sets": data_sets_created, + "template": {"id": template_id, "arn": arn, "version_arn": version_arn}, + } + def _create_or_update_data_set(self, dataset_definition: dict): """ Create new or updates existing DataSet diff --git a/core/operation/publish_dashboard_from_template.py b/core/operation/publish_dashboard_from_template.py index 3b30b0e..d86b089 100644 --- a/core/operation/publish_dashboard_from_template.py +++ b/core/operation/publish_dashboard_from_template.py @@ -17,12 +17,7 @@ def __init__( self._group_name = group_name super().__init__(*args, **kwargs) - def execute(self): - # get the template definition - template_def = self._get_template_definition(template_id=self._template_id)[ - "Definition" - ] - + def execute(self) -> dict: desc_template_params = { "AwsAccountId": self._aws_account_id, "TemplateId": self._template_id, @@ -53,7 +48,7 @@ def execute(self): ] # for each data set config - for dsr in template_def["DataSetConfigurations"]: + for dsr in template["Version"]["DataSetConfigurations"]: # resolve the dataset arn placeholder = dsr["Placeholder"] data_set_id = self._resolve_data_set_id_from_placeholder( @@ -115,6 +110,12 @@ def execute(self): f"Unexpected response from trying to update_dashboard_permissions : {json.dumps(response, indent=4)} " ) + return { + "status": "success", + "dashboard_arn": dashboard_arn, + "dashboard_id": dashboard_id, + } + def _create_or_update_dashboard(self, dashboard_params: dict) -> tuple[str, str]: """ Creates new or updates existing template. 
diff --git a/tests/core/operation/test_export_analysis_operation.py b/tests/core/operation/test_export_analysis_operation.py index f19a2cc..d7b1462 100644 --- a/tests/core/operation/test_export_analysis_operation.py +++ b/tests/core/operation/test_export_analysis_operation.py @@ -109,7 +109,9 @@ def test(self): aws_account_id=account, ) - op.execute() + results = op.execute() + + assert results["status"] == "success" assets_dir = os.path.join(output_dir, "assets") data_sets_dir = os.path.join(assets_dir, "data-sets") diff --git a/tests/core/operation/test_import_template_operation.py b/tests/core/operation/test_import_template_operation.py index 030bdc2..89dd9e8 100644 --- a/tests/core/operation/test_import_template_operation.py +++ b/tests/core/operation/test_import_template_operation.py @@ -216,4 +216,5 @@ def test(self): data_source_arn=data_source_arn, ) - op.execute() + result = op.execute() + assert result["status"] == "success" diff --git a/tests/core/operation/test_publish_operation.py b/tests/core/operation/test_publish_operation.py index 32e8b0d..316ce8f 100644 --- a/tests/core/operation/test_publish_operation.py +++ b/tests/core/operation/test_publish_operation.py @@ -1,9 +1,6 @@ -from datetime import datetime - import botocore from botocore.config import Config from botocore.stub import Stubber -from dateutil.tz import tzlocal from core.operation.publish_dashboard_from_template import ( PublishDashboardFromTemplateOperation, @@ -32,70 +29,12 @@ def test(self): } with Stubber(qs_client) as stub: template_arn = f"arn:aws:quicksight:::template/{target_namespace}-library" - - stub.add_response( - "describe_template_definition", - expected_params=describe_template_definition_params, - service_response={ - "ResponseMetadata": { - "RequestId": "f89ac94e-34e1-4e45-9b12-c56af317e195", - "HTTPStatusCode": 200, - "HTTPHeaders": { - "date": "Tue, 19 Sep 2023 21:58:09 GMT", - "content-type": "application/json", - "content-length": "172398", - "connection": "keep-alive", - "x-amzn-requestid": "f89ac94e-34e1-4e45-9b12-c56af317e195", - }, - "RetryAttempts": 0, - }, - "Status": 200, - "Name": "tpp-prod-library", - "TemplateId": f"{target_namespace}-library", - "ResourceStatus": "CREATION_SUCCESSFUL", - "Definition": { - "DataSetConfigurations": [ - { - "Placeholder": "circulation_view", - "DataSetSchema": {"ColumnSchemaList": []}, - "ColumnGroupSchemaList": [], - }, - { - "Placeholder": "patron_events", - "DataSetSchema": {"ColumnSchemaList": []}, - "ColumnGroupSchemaList": [], - }, - ], - "Sheets": [], - "AnalysisDefaults": { - "DefaultNewSheetConfiguration": { - "InteractiveLayoutConfiguration": { - "Grid": { - "CanvasSizeOptions": { - "ScreenCanvasSizeOptions": { - "ResizeOption": "FIXED", - "OptimizedViewPortWidth": "1600px", - } - } - } - }, - "SheetContentType": "INTERACTIVE", - } - }, - }, - "RequestId": "f89ac94e-34e1-4e45-9b12-c56af317e195", - }, - ) stub.add_response( "describe_template", service_response={ "Template": { "Arn": template_arn, - "Name": "tpp-prod-library", "Version": { - "CreatedTime": datetime( - 2023, 9, 19, 11, 58, 37, 288000, tzinfo=tzlocal() - ), "VersionNumber": 5, "Status": "CREATION_SUCCESSFUL", "DataSetConfigurations": [ @@ -110,15 +49,7 @@ def test(self): "ColumnGroupSchemaList": [], }, ], - "Sheets": [], }, - "TemplateId": "tpp-prod-library", - "LastUpdatedTime": datetime( - 2023, 9, 19, 11, 58, 37, 284000, tzinfo=tzlocal() - ), - "CreatedTime": datetime( - 2023, 9, 1, 12, 50, 43, 593000, tzinfo=tzlocal() - ), } }, expected_params={"AwsAccountId": account, 
"TemplateId": template_id}, @@ -174,7 +105,7 @@ def test(self): }, "Arn": "arn:aws:quicksight:us-west-2:128682227026:dashboard/tpp-prod-library", "VersionArn": f"arn:aws:quicksight:::dashboard/{target_namespace}-library/version/6", - "DashboardId": "tpp-prod-library", + "DashboardId": f"{target_namespace}-library", "CreationStatus": "CREATION_IN_PROGRESS", "Status": 202, "RequestId": "7d276ede-baa4-4662-abb1-a917489c9e96", @@ -259,4 +190,7 @@ def test(self): group_name=group_name, ) - op.execute() + result = op.execute() + + assert result["status"] == "success" + assert result["dashboard_id"] == template_id From 81686680cb6444054f15ced9cf0f2c4378b173e5 Mon Sep 17 00:00:00 2001 From: Daniel Bernstein Date: Mon, 25 Sep 2023 13:21:22 -0700 Subject: [PATCH 24/30] Use log.info rather than click.echo. Also correct variable style. --- core/cli.py | 49 ++++++++++++------- core/operation/baseoperation.py | 6 +-- core/operation/import_from_json_operation.py | 7 +-- .../publish_dashboard_from_template.py | 8 +-- 4 files changed, 41 insertions(+), 29 deletions(-) diff --git a/core/cli.py b/core/cli.py index 666c366..e1bfe88 100644 --- a/core/cli.py +++ b/core/cli.py @@ -9,6 +9,17 @@ PublishDashboardFromTemplateOperation, ) + +logging.basicConfig( + level=logging.DEBUG, + format="%(asctime)s [%(levelname)s] %(message)s", + handlers=[ + logging.FileHandler("palace-quicksight.log"), + logging.StreamHandler() + ] +) + + log = logging.getLogger("core.cli") @@ -39,18 +50,18 @@ def export_analysis( """ Exports a template and dependent data sets based on the specified analysis to JSON files. """ - click.echo(f"Create version") - click.echo(f"aws_profile = {aws_profile}") - click.echo(f"analysis_id= {analysis_id}") - click.echo(f"aws_account_id= {aws_account_id}") - click.echo(f"output_dir= {output_dir}") + log.info(f"Create version") + log.info(f"aws_profile = {aws_profile}") + log.info(f"analysis_id= {analysis_id}") + log.info(f"aws_account_id={aws_account_id}") + log.info(f"output_dir={output_dir}") result = ExportAnalysisOperation( qs_client=create_quicksight_client(aws_profile=aws_profile), aws_account_id=aws_account_id, analysis_id=analysis_id, output_dir=output_dir, ).execute() - click.echo(result) + log.info(result) cli.add_command(export_analysis) @@ -89,12 +100,12 @@ def import_template( Import template and datasource files from json """ - click.echo(f"import_from_json") - click.echo(f"aws_profile = {aws_profile}") - click.echo(f"aws_account_id = {aws_account_id}") - click.echo(f"template_name = {template_name}") - click.echo(f"data_source_arn = {data_source_arn}") - click.echo(f"input_dir= {input_dir}") + log.info(f"import_from_json") + log.info(f"aws_profile = {aws_profile}") + log.info(f"aws_account_id = {aws_account_id}") + log.info(f"template_name = {template_name}") + log.info(f"data_source_arn = {data_source_arn}") + log.info(f"input_dir= {input_dir}") result = ImportFromJsonOperation( qs_client=create_quicksight_client(aws_profile), @@ -104,7 +115,7 @@ def import_template( data_source_arn=data_source_arn, input_dir=input_dir, ).execute() - click.echo(result) + log.info(result) cli.add_command(import_template) @@ -133,11 +144,11 @@ def publish_dashboard( Create/Update a dashboard from a template """ - click.echo(f"publish dashboard from template") - click.echo(f"aws_profile = {aws_profile}") - click.echo(f"aws_account_id = {aws_account_id}") - click.echo(f"template_id = {template_id}") - click.echo(f"group_name = {group_name}") + log.info(f"publish dashboard from template") + 
log.info(f"aws_profile = {aws_profile}") + log.info(f"aws_account_id = {aws_account_id}") + log.info(f"template_id = {template_id}") + log.info(f"group_name = {group_name}") result = PublishDashboardFromTemplateOperation( qs_client=create_quicksight_client(aws_profile), aws_account_id=aws_account_id, @@ -145,7 +156,7 @@ def publish_dashboard( target_namespace=target_namespace, group_name=group_name, ).execute() - click.echo(result) + log.info(result) cli.add_command(publish_dashboard) diff --git a/core/operation/baseoperation.py b/core/operation/baseoperation.py index f996231..ea4d890 100644 --- a/core/operation/baseoperation.py +++ b/core/operation/baseoperation.py @@ -33,10 +33,10 @@ def _create_or_update_template(self, template_data: dict) -> tuple[str, str, str response = self._qs_client.create_template(**template_data) except self._qs_client.exceptions.ResourceExistsException as e: response = self._qs_client.update_template(**template_data) - httpStatus = response["ResponseMetadata"]["HTTPStatusCode"] - if httpStatus != 202: + http_status = response["ResponseMetadata"]["HTTPStatusCode"] + if http_status != 202: self._log.error( - f"Unexpected response from create_template request: {httpStatus} " + f"Unexpected response from create_template request: {http_status} " ) raise Exception( f"Unexpected response from trying to create/update template : {json.dumps(response, indent=4)} " diff --git a/core/operation/import_from_json_operation.py b/core/operation/import_from_json_operation.py index bed9453..3d34f17 100644 --- a/core/operation/import_from_json_operation.py +++ b/core/operation/import_from_json_operation.py @@ -95,11 +95,12 @@ def _create_or_update_data_set(self, dataset_definition: dict): try: response = self._qs_client.create_data_set(**dataset_definition) except self._qs_client.exceptions.ResourceExistsException as e: + response = self._qs_client.update_data_set(**dataset_definition) - httpStatus = response["ResponseMetadata"]["HTTPStatusCode"] - if httpStatus != 201 and httpStatus != 200: + http_status = response["ResponseMetadata"]["HTTPStatusCode"] + if http_status != 201 and http_status != 200: self._log.error( - f"Unexpected response from create_dataset request: {httpStatus} " + f"Unexpected response from create_dataset request: {http_status} " ) raise Exception( f"Unexpected response from trying to create/update dataset : {json.dumps(response, indent=4)} " diff --git a/core/operation/publish_dashboard_from_template.py b/core/operation/publish_dashboard_from_template.py index d86b089..2de92db 100644 --- a/core/operation/publish_dashboard_from_template.py +++ b/core/operation/publish_dashboard_from_template.py @@ -101,8 +101,8 @@ def execute(self) -> dict: } response = self._qs_client.update_dashboard_permissions(**permissions_params) - httpStatus = response["ResponseMetadata"]["HTTPStatusCode"] - if httpStatus != 202 and httpStatus != 200: + http_status = response["ResponseMetadata"]["HTTPStatusCode"] + if http_status != 202 and http_status != 200: self._log.error( f"Unexpected response from update_dashboard_permissions request: {httpStatus} " ) @@ -126,8 +126,8 @@ def _create_or_update_dashboard(self, dashboard_params: dict) -> tuple[str, str] response = self._qs_client.create_dashboard(**dashboard_params) except self._qs_client.exceptions.ResourceExistsException as e: response = self._qs_client.update_dashboard(**dashboard_params) - httpStatus = response["ResponseMetadata"]["HTTPStatusCode"] - if httpStatus != 202 and httpStatus != 200: + http_status = 
response["ResponseMetadata"]["HTTPStatusCode"] + if http_status != 202 and http_status != 200: self._log.error( f"Unexpected response from create_template request: {httpStatus} " ) From add93342e9bffe8c02f282fa1d9431021eeb8caa Mon Sep 17 00:00:00 2001 From: Daniel Bernstein Date: Mon, 25 Sep 2023 14:26:10 -0700 Subject: [PATCH 25/30] Use dataclass instead of tuple for return values. --- core/cli.py | 6 +--- core/operation/baseoperation.py | 21 +++++++++++--- core/operation/export_analysis_operation.py | 13 ++++++--- core/operation/import_from_json_operation.py | 29 ++++++++++++-------- 4 files changed, 44 insertions(+), 25 deletions(-) diff --git a/core/cli.py b/core/cli.py index e1bfe88..0216c77 100644 --- a/core/cli.py +++ b/core/cli.py @@ -9,14 +9,10 @@ PublishDashboardFromTemplateOperation, ) - logging.basicConfig( level=logging.DEBUG, format="%(asctime)s [%(levelname)s] %(message)s", - handlers=[ - logging.FileHandler("palace-quicksight.log"), - logging.StreamHandler() - ] + handlers=[logging.FileHandler("palace-quicksight.log"), logging.StreamHandler()], ) diff --git a/core/operation/baseoperation.py b/core/operation/baseoperation.py index ea4d890..20d0fef 100644 --- a/core/operation/baseoperation.py +++ b/core/operation/baseoperation.py @@ -2,12 +2,20 @@ import logging import os from abc import abstractmethod +from dataclasses import dataclass ASSET_DIR = "assets" TEMPLATE_DIR = os.path.join(ASSET_DIR, "templates") DATA_SET_DIR = os.path.join(ASSET_DIR, "data-sets") +@dataclass +class TemplateResponse: + arn: str + version_arn: str + template_id: str + + class BaseOperation: """ @@ -23,16 +31,19 @@ def __init__(self, qs_client, aws_account_id: str): def execute(self) -> dict: pass - def _create_or_update_template(self, template_data: dict) -> tuple[str, str, str]: + def _create_or_update_template(self, template_data: dict) -> TemplateResponse: """ Creates new or updates existing template. 
:param template_data: :return: Template ARN, Template Version ARN, and the Template ID """ + local_template_data = template_data.copy() + try: - response = self._qs_client.create_template(**template_data) + response = self._qs_client.create_template(**local_template_data) except self._qs_client.exceptions.ResourceExistsException as e: response = self._qs_client.update_template(**template_data) + http_status = response["ResponseMetadata"]["HTTPStatusCode"] if http_status != 202: self._log.error( @@ -42,11 +53,13 @@ def _create_or_update_template(self, template_data: dict) -> tuple[str, str, str f"Unexpected response from trying to create/update template : {json.dumps(response, indent=4)} " ) else: - return response["Arn"], response["VersionArn"], response["TemplateId"] + return TemplateResponse( + response["Arn"], response["VersionArn"], response["TemplateId"] + ) def _create_or_update_template_from_template_definition( self, template_definition: dict - ): + ) -> TemplateResponse: template_definition["AwsAccountId"] = self._aws_account_id return self._create_or_update_template(template_data=template_definition) diff --git a/core/operation/export_analysis_operation.py b/core/operation/export_analysis_operation.py index c2be173..ea78764 100644 --- a/core/operation/export_analysis_operation.py +++ b/core/operation/export_analysis_operation.py @@ -2,7 +2,12 @@ import os from typing import List -from core.operation.baseoperation import DATA_SET_DIR, TEMPLATE_DIR, BaseOperation +from core.operation.baseoperation import ( + DATA_SET_DIR, + TEMPLATE_DIR, + BaseOperation, + TemplateResponse, +) from core.util import recursively_replace_value, retry @@ -54,13 +59,13 @@ def execute(self) -> dict: ) # create a template from the analysis - arn, version_arn, template_id = self._create_or_update_template_from_analysis( + template_response = self._create_or_update_template_from_analysis( analysis=analysis, data_set_references=data_set_references ) def verify_success() -> bool: self._template_definition = self._get_template_definition( - template_id=template_id + template_id=template_response.template_id ) return "SUCCESSFUL" in self._template_definition["ResourceStatus"] @@ -95,7 +100,7 @@ def verify_success() -> bool: def _create_or_update_template_from_analysis( self, analysis, data_set_references: List - ): + ) -> TemplateResponse: template_name = analysis["Name"] params = { "AwsAccountId": self._aws_account_id, diff --git a/core/operation/import_from_json_operation.py b/core/operation/import_from_json_operation.py index 3d34f17..af79336 100644 --- a/core/operation/import_from_json_operation.py +++ b/core/operation/import_from_json_operation.py @@ -1,9 +1,16 @@ import json +from dataclasses import dataclass from core.operation.baseoperation import DATA_SET_DIR, TEMPLATE_DIR, BaseOperation from core.util import recursively_replace_value +@dataclass +class DataSetResponse: + arn: str + data_set_id: str + + class ImportFromJsonOperation(BaseOperation): """ Imports a Quicksight template and all it's dependencies into Quicksight. 
@@ -36,11 +43,7 @@ def execute(self) -> dict: # create name template in namespace template_data["Name"] = self._target_namespace + "-" + self._template_name template_data["TemplateId"] = template_data["Name"] - ( - arn, - version_arn, - template_id, - ) = self._create_or_update_template_from_template_definition( + template_response = self._create_or_update_template_from_template_definition( template_definition=template_data ) @@ -69,21 +72,23 @@ def execute(self) -> dict: placeholder=placeholder, namespace=self._target_namespace ) dataset["Name"] = dataset["Name"] - ds_arn, data_set_id = self._create_or_update_data_set( - dataset_definition=dataset - ) + ds_response = self._create_or_update_data_set(dataset_definition=dataset) data_sets_created.append( { - "id": data_set_id, - "arn": ds_arn, + "id": ds_response.data_set_id, + "arn": ds_response.arn, } ) return { "status": "success", "data_sets": data_sets_created, - "template": {"id": template_id, "arn": arn, "version_arn": version_arn}, + "template": { + "id": template_response.template_id, + "arn": template_response.arn, + "version_arn": template_response.version_arn, + }, } def _create_or_update_data_set(self, dataset_definition: dict): @@ -106,4 +111,4 @@ def _create_or_update_data_set(self, dataset_definition: dict): f"Unexpected response from trying to create/update dataset : {json.dumps(response, indent=4)} " ) else: - return response["Arn"], response["DataSetId"] + return DataSetResponse(response["Arn"], response["DataSetId"]) From 0a360cc3ae75ccc63d29053e388b98c22ac84fef Mon Sep 17 00:00:00 2001 From: Daniel Bernstein Date: Mon, 25 Sep 2023 15:18:14 -0700 Subject: [PATCH 26/30] Remove existing templates and data sets before creating to ensure that permissions and tags are overwritten properly. --- core/operation/baseoperation.py | 21 ++++++-- core/operation/export_analysis_operation.py | 5 +- core/operation/import_from_json_operation.py | 19 +++++-- .../publish_dashboard_from_template.py | 4 +- .../test_export_analysis_operation.py | 9 ++++ .../test_import_template_operation.py | 53 +++++++++++++++---- 6 files changed, 90 insertions(+), 21 deletions(-) diff --git a/core/operation/baseoperation.py b/core/operation/baseoperation.py index 20d0fef..b01961d 100644 --- a/core/operation/baseoperation.py +++ b/core/operation/baseoperation.py @@ -1,6 +1,7 @@ import json import logging import os +import time from abc import abstractmethod from dataclasses import dataclass @@ -17,7 +18,6 @@ class TemplateResponse: class BaseOperation: - """ A base class for AWS based operations. """ @@ -37,12 +37,23 @@ def _create_or_update_template(self, template_data: dict) -> TemplateResponse: :param template_data: :return: Template ARN, Template Version ARN, and the Template ID """ - local_template_data = template_data.copy() try: - response = self._qs_client.create_template(**local_template_data) - except self._qs_client.exceptions.ResourceExistsException as e: - response = self._qs_client.update_template(**template_data) + self._qs_client.delete_template( + **{ + "TemplateId": template_data["TemplateId"], + "AwsAccountId": template_data["AwsAccountId"], + } + ) + + # there can be some latency between the completion of the deletion command + # and the complete backend deletion operation. 
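+            # The fixed pause is a best-effort guard only; no polling is done
+            # here, so an unusually slow deletion could still race the
+            # create_template call below.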
+            time.sleep(3)
+
+        except self._qs_client.exceptions.ResourceNotFoundException as e:
+            pass
+
+        response = self._qs_client.create_template(**template_data)
 
         http_status = response["ResponseMetadata"]["HTTPStatusCode"]
         if http_status != 202:
diff --git a/core/operation/export_analysis_operation.py b/core/operation/export_analysis_operation.py
index ea78764..f7939a3 100644
--- a/core/operation/export_analysis_operation.py
+++ b/core/operation/export_analysis_operation.py
@@ -33,10 +33,11 @@ def execute(self) -> dict:
 
         https_status = analysis_description["ResponseMetadata"]["HTTPStatusCode"]
         if https_status != 200:
-            self._log.error(
+            message = (
                 f"Unexpected response from describe_analysis request: {https_status}"
             )
-            return
+            self._log.error(message)
+            raise Exception(message)
 
         # retrieve definition
         analysis_definition = self._qs_client.describe_analysis_definition(
diff --git a/core/operation/import_from_json_operation.py b/core/operation/import_from_json_operation.py
index af79336..03063e2 100644
--- a/core/operation/import_from_json_operation.py
+++ b/core/operation/import_from_json_operation.py
@@ -1,4 +1,5 @@
 import json
+import time
 from dataclasses import dataclass
 
 from core.operation.baseoperation import DATA_SET_DIR, TEMPLATE_DIR, BaseOperation
 from core.util import recursively_replace_value
@@ -97,11 +98,23 @@ def _create_or_update_data_set(self, dataset_definition: dict):
         :param dataset_definition:
         :return: DataSet ARN and DataSet Id
         """
+
         try:
-            response = self._qs_client.create_data_set(**dataset_definition)
-        except self._qs_client.exceptions.ResourceExistsException as e:
+            self._qs_client.delete_data_set(
+                **{
+                    "DataSetId": dataset_definition["DataSetId"],
+                    "AwsAccountId": dataset_definition["AwsAccountId"],
+                }
+            )
+
+            # there can be some latency between the completion of the deletion command
+            # and the complete backend deletion operation.
+            time.sleep(3)
+
+        except self._qs_client.exceptions.ResourceNotFoundException as e:
+            pass
-            response = self._qs_client.update_data_set(**dataset_definition)
+        response = self._qs_client.create_data_set(**dataset_definition)
         http_status = response["ResponseMetadata"]["HTTPStatusCode"]
         if http_status != 201 and http_status != 200:
             self._log.error(
diff --git a/core/operation/publish_dashboard_from_template.py b/core/operation/publish_dashboard_from_template.py
index 2de92db..4b62f51 100644
--- a/core/operation/publish_dashboard_from_template.py
+++ b/core/operation/publish_dashboard_from_template.py
@@ -104,7 +104,7 @@ def execute(self) -> dict:
         http_status = response["ResponseMetadata"]["HTTPStatusCode"]
         if http_status != 202 and http_status != 200:
             self._log.error(
-                f"Unexpected response from update_dashboard_permissions request: {httpStatus} "
+                f"Unexpected response from update_dashboard_permissions request: {http_status} "
             )
             raise Exception(
                 f"Unexpected response from trying to update_dashboard_permissions : {json.dumps(response, indent=4)} "
             )
@@ -129,7 +129,7 @@ def _create_or_update_dashboard(self, dashboard_params: dict) -> tuple[str, str]
         http_status = response["ResponseMetadata"]["HTTPStatusCode"]
         if http_status != 202 and http_status != 200:
             self._log.error(
-                f"Unexpected response from create_template request: {httpStatus} "
+                f"Unexpected response from create_template request: {http_status} "
             )
             raise Exception(
                 f"Unexpected response from trying to create/update template : {json.dumps(response, indent=4)} "
             )
diff --git a/tests/core/operation/test_export_analysis_operation.py b/tests/core/operation/test_export_analysis_operation.py
index d7b1462..9c1b0e5 100644
--- a/tests/core/operation/test_export_analysis_operation.py
+++ b/tests/core/operation/test_export_analysis_operation.py
@@ -68,6 +68,15 @@ def test(self):
             },
         }
 
+        stub.add_response(
+            "delete_template",
+            service_response={},
+            expected_params={
+                "TemplateId": "library-template",
+                "AwsAccountId": account,
+            },
+        )
+
         stub.add_response(
             "create_template",
             service_response=create_template_response(),
diff --git a/tests/core/operation/test_import_template_operation.py b/tests/core/operation/test_import_template_operation.py
index 89dd9e8..f985991 100644
--- a/tests/core/operation/test_import_template_operation.py
+++ b/tests/core/operation/test_import_template_operation.py
@@ -6,7 +6,6 @@
 
 
 def create_data_set_response(target_namespace, data_set_name):
-    new_ds_name = f"{target_namespace}-{data_set_name}"
     response = {
         "ResponseMetadata": {
             "RequestId": "3aecd4ed-9a15-408a-a251-532718e574bd",
@@ -21,8 +20,8 @@ def create_data_set_response(target_namespace, data_set_name):
             "RetryAttempts": 0,
         },
         "Status": 200,
-        "Arn": f"arn:aws:quicksight:us-west-2:128682227026:dataset/{new_ds_name}",
-        "DataSetId": new_ds_name,
+        "Arn": f"arn:aws:quicksight:us-west-2:128682227026:dataset/{data_set_name}",
+        "DataSetId": data_set_name,
         "RequestId": "3aecd4ed-9a15-408a-a251-532718e574bd",
     }
     return response
@@ -163,6 +162,10 @@
 }
 
 
+def create_new_dataset_name(target_namespace: str, data_set_name: str) -> str:
+    return f"{target_namespace}-{data_set_name}"
+
+
 class TestImportTemplateOperation:
     def test(self):
         template_name = "library"
@@ -181,27 +184,59 @@ def test(self):
             "quicksight", config=boto_config
         )
         with Stubber(qs_client) as stub:
+
+            stub.add_response(
+                "delete_template",
+                service_response={},
+                expected_params={
+                    "TemplateId": new_template_name,
+                    "AwsAccountId": account,
+                },
+            )
+
             stub.add_response(
                 "create_template",
                 service_response=create_template_response(new_template_name),
                 expected_params=create_template_params(target_namespace, account),
             )
 
+            ds1_name = create_new_dataset_name(
+                target_namespace=target_namespace, data_set_name="circulation_view"
+            )
+
+            stub.add_response(
+                "delete_data_set",
+                service_response={},
+                expected_params={
+                    "DataSetId": ds1_name,
+                    "AwsAccountId": account,
+                },
+            )
+
             stub.add_response(
                 "create_data_set",
-                service_response=create_data_set_response(
-                    target_namespace, "circulation_view"
-                ),
+                service_response=create_data_set_response(target_namespace, ds1_name),
                 expected_params=create_data_set_params1(
                     input_dir, target_namespace, data_source_arn, account
                 ),
             )
 
+            ds2_name = create_new_dataset_name(
+                target_namespace=target_namespace, data_set_name="patron_events"
+            )
+
+            stub.add_response(
+                "delete_data_set",
+                service_response={},
+                expected_params={
+                    "DataSetId": ds2_name,
+                    "AwsAccountId": account,
+                },
+            )
+
             stub.add_response(
                 "create_data_set",
-                service_response=create_data_set_response(
-                    target_namespace, "patron_events"
-                ),
+                service_response=create_data_set_response(target_namespace, ds2_name),
                 expected_params=create_data_set_params2(
                     input_dir, target_namespace, data_source_arn, account
                 ),

From 0baa75fa7b42714e27c03a210d8a95d4f360af5a Mon Sep 17 00:00:00 2001
From: Daniel Bernstein
Date: Mon, 25 Sep 2023 15:40:46 -0700
Subject: [PATCH 27/30] Fix typo and use tempfile to generate the test output
 file.

---
 core/operation/import_from_json_operation.py            | 6 +++---
 tests/core/operation/test_export_analysis_operation.py  | 3 ++-
 2 files changed, 5 insertions(+), 4 deletions(-)

diff --git a/core/operation/import_from_json_operation.py b/core/operation/import_from_json_operation.py
index 03063e2..62811e2 100644
--- a/core/operation/import_from_json_operation.py
+++ b/core/operation/import_from_json_operation.py
@@ -29,14 +29,14 @@ def __init__(
         self._template_name = template_name
         self._target_namespace = target_namespace
         self._data_source_arn = data_source_arn
-        self._intput_dir = input_dir
+        self._input_dir = input_dir
         super().__init__(*args, **kwargs)
 
     def execute(self) -> dict:
         # Read template file into dictionary
         template_data = None
         template_file = self._resolve_path(
-            self._intput_dir, TEMPLATE_DIR, self._template_name + ".json"
+            self._input_dir, TEMPLATE_DIR, self._template_name + ".json"
         )
         with open(template_file) as template_file:
             template_data = json.loads(template_file.read())
@@ -56,7 +56,7 @@ def execute(self) -> dict:
             dataset = None
             placeholder = di["Placeholder"]
             dataset_filename = self._resolve_path(
-                self._intput_dir, DATA_SET_DIR, placeholder + ".json"
+                self._input_dir, DATA_SET_DIR, placeholder + ".json"
             )
             with open(dataset_filename) as dataset_file:
                 dataset = json.loads(dataset_file.read())
diff --git a/tests/core/operation/test_export_analysis_operation.py b/tests/core/operation/test_export_analysis_operation.py
index 9c1b0e5..ae77d05 100644
--- a/tests/core/operation/test_export_analysis_operation.py
+++ b/tests/core/operation/test_export_analysis_operation.py
@@ -1,4 +1,5 @@
 import os
+import tempfile
 
 import botocore
 from botocore.config import Config
@@ -18,7 +19,7 @@
 class TestExportAnalysisOperation:
     def test(self):
         analysis_id = "my-quicksight-analysis-id"
-        output_dir = "/tmp/test-output"
+        output_dir = tempfile.NamedTemporaryFile().name
         account = "012345678910"
 
         boto_config = Config(

From b770c8e70b83fb017a640b74f3fe72f0b5063f80 Mon Sep 17 00:00:00 2001
From: Daniel Bernstein
Date: Mon, 25 Sep 2023 15:47:21 -0700
Subject: [PATCH 28/30] Remove extraneous comment.

---
 core/operation/import_from_json_operation.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/core/operation/import_from_json_operation.py b/core/operation/import_from_json_operation.py
index 62811e2..30c592a 100644
--- a/core/operation/import_from_json_operation.py
+++ b/core/operation/import_from_json_operation.py
@@ -41,7 +41,7 @@ def execute(self) -> dict:
         with open(template_file) as template_file:
             template_data = json.loads(template_file.read())
 
-        # create name template in namespace
+        # create or update template
         template_data["Name"] = self._target_namespace + "-" + self._template_name
         template_data["TemplateId"] = template_data["Name"]
         template_response = self._create_or_update_template_from_template_definition(

From 60111c29ace43a814e5a4839ea80d5eae5b5fe53 Mon Sep 17 00:00:00 2001
From: Daniel Bernstein
Date: Mon, 25 Sep 2023 16:07:42 -0700
Subject: [PATCH 29/30] Use standard approach to passing AWS profiles to
 boto3.

---
 core/cli.py | 22 ++++++----------------
 1 file changed, 6 insertions(+), 16 deletions(-)

diff --git a/core/cli.py b/core/cli.py
index 0216c77..114c081 100644
--- a/core/cli.py
+++ b/core/cli.py
@@ -19,8 +19,8 @@
 log = logging.getLogger("core.cli")
 
 
-def create_quicksight_client(aws_profile: str):
-    boto3.setup_default_session(profile_name=aws_profile)
+def create_quicksight_client():
+    boto3.setup_default_session()
     return boto3.client("quicksight")
 
 
@@ -30,7 +30,6 @@ def cli():
 
 
 @click.command()
-@click.option("--aws-profile", required=True, help="The AWS account profile")
 @click.option("--aws-account-id", required=True, help="The ID of the AWS account")
 @click.option(
     "--analysis-id", required=True, help="The ID of the Analysis to be exported"
 )
 @click.option(
     "--output-dir",
     required=True,
     help="The path to the output directory to which resources will be exported",
 )
-def export_analysis(
-    aws_profile: str, aws_account_id: str, analysis_id: str, output_dir: str
-):
+def export_analysis(aws_account_id: str, analysis_id: str, output_dir: str):
     """
     Exports a template and dependent data sets based on the specified analysis to JSON files.
""" log.info(f"Create version") - log.info(f"aws_profile = {aws_profile}") log.info(f"analysis_id= {analysis_id}") log.info(f"aws_account_id={aws_account_id}") log.info(f"output_dir={output_dir}") result = ExportAnalysisOperation( - qs_client=create_quicksight_client(aws_profile=aws_profile), + qs_client=create_quicksight_client(), aws_account_id=aws_account_id, analysis_id=analysis_id, output_dir=output_dir, @@ -64,7 +60,6 @@ def export_analysis( @click.command -@click.option("--aws-profile", required=True, help="The AWS account profile") @click.option("--aws-account-id", required=True, help="The ID of the AWS account") @click.option( "--template-name", required=True, help="The name of the template to be restored" @@ -85,7 +80,6 @@ def export_analysis( help="The path to the input directory from which resources will be imported", ) def import_template( - aws_profile: str, aws_account_id: str, template_name: str, data_source_arn: str, @@ -97,14 +91,13 @@ def import_template( """ log.info(f"import_from_json") - log.info(f"aws_profile = {aws_profile}") log.info(f"aws_account_id = {aws_account_id}") log.info(f"template_name = {template_name}") log.info(f"data_source_arn = {data_source_arn}") log.info(f"input_dir= {input_dir}") result = ImportFromJsonOperation( - qs_client=create_quicksight_client(aws_profile), + qs_client=create_quicksight_client(), aws_account_id=aws_account_id, template_name=template_name, target_namespace=target_namespace, @@ -118,7 +111,6 @@ def import_template( @click.command -@click.option("--aws-profile", required=True, help="The AWS account profile") @click.option("--aws-account-id", required=True, help="The ID of the AWS account") @click.option( "--template-id", required=True, help="The ID of the template to be restored" @@ -130,7 +122,6 @@ def import_template( ) @click.option("--group-name", required=True, help="Name of the Quicksight User Group") def publish_dashboard( - aws_profile: str, aws_account_id: str, template_id: str, target_namespace: str, @@ -141,12 +132,11 @@ def publish_dashboard( """ log.info(f"publish dashboard from template") - log.info(f"aws_profile = {aws_profile}") log.info(f"aws_account_id = {aws_account_id}") log.info(f"template_id = {template_id}") log.info(f"group_name = {group_name}") result = PublishDashboardFromTemplateOperation( - qs_client=create_quicksight_client(aws_profile), + qs_client=create_quicksight_client(), aws_account_id=aws_account_id, template_id=template_id, target_namespace=target_namespace, From 1960ba59d28904ae266605613ddbd58c8796e083 Mon Sep 17 00:00:00 2001 From: Daniel Bernstein Date: Tue, 3 Oct 2023 12:17:17 -0700 Subject: [PATCH 30/30] Improve logging. 

---
 core/operation/baseoperation.py              | 14 +++++++++++---
 core/operation/import_from_json_operation.py | 16 +++++++++++++---
 2 files changed, 24 insertions(+), 6 deletions(-)

diff --git a/core/operation/baseoperation.py b/core/operation/baseoperation.py
index b01961d..7912361 100644
--- a/core/operation/baseoperation.py
+++ b/core/operation/baseoperation.py
@@ -38,10 +38,13 @@ def _create_or_update_template(self, template_data: dict) -> TemplateResponse:
         :return: Template ARN, Template Version ARN, and the Template ID
         """
 
+        template_id = template_data["TemplateId"]
         try:
+            self._log.info(f"ready to delete template ({template_id}) if it exists.")
+
             self._qs_client.delete_template(
                 **{
-                    "TemplateId": template_data["TemplateId"],
+                    "TemplateId": template_id,
                     "AwsAccountId": template_data["AwsAccountId"],
                 }
             )
@@ -49,21 +52,26 @@ def _create_or_update_template(self, template_data: dict) -> TemplateResponse:
             # there can be some latency between the completion of the deletion command
             # and the complete backend deletion operation.
             time.sleep(3)
+            self._log.info(f"template ({template_id}) deletion complete.")
 
         except self._qs_client.exceptions.ResourceNotFoundException as e:
-            pass
+            self._log.info(f"template ({template_id}) not found: no deletion needed.")
 
         response = self._qs_client.create_template(**template_data)
 
         http_status = response["ResponseMetadata"]["HTTPStatusCode"]
         if http_status != 202:
             self._log.error(
-                f"Unexpected response from create_template request: {http_status} "
+                f"Unexpected response from create_template request: "
+                f"template_id = {template_id}, http_status = {http_status}"
             )
             raise Exception(
                 f"Unexpected response from trying to create/update template : {json.dumps(response, indent=4)} "
             )
         else:
+            self._log.info(
+                f"Template ({template_id}) created successfully: http_status = {http_status}"
+            )
             return TemplateResponse(
                 response["Arn"], response["VersionArn"], response["TemplateId"]
             )
diff --git a/core/operation/import_from_json_operation.py b/core/operation/import_from_json_operation.py
index 30c592a..e296975 100644
--- a/core/operation/import_from_json_operation.py
+++ b/core/operation/import_from_json_operation.py
@@ -99,10 +99,12 @@ def _create_or_update_data_set(self, dataset_definition: dict):
         :return: DataSet ARN and DataSet Id
         """
 
+        data_set_id = dataset_definition["DataSetId"]
         try:
+            self._log.info(f"ready to delete data set ({data_set_id}) if it exists.")
             self._qs_client.delete_data_set(
                 **{
-                    "DataSetId": dataset_definition["DataSetId"],
+                    "DataSetId": data_set_id,
                     "AwsAccountId": dataset_definition["AwsAccountId"],
                 }
             )
@@ -110,18 +112,26 @@ def _create_or_update_data_set(self, dataset_definition: dict):
             # there can be some latency between the completion of the deletion command
             # and the complete backend deletion operation.
             time.sleep(3)
+            self._log.info(f"Deletion complete for {data_set_id}.")
 
         except self._qs_client.exceptions.ResourceNotFoundException as e:
-            pass
+            self._log.info(
+                f"No deletion necessary: data set {data_set_id} does not exist."
+            )
 
         response = self._qs_client.create_data_set(**dataset_definition)
         http_status = response["ResponseMetadata"]["HTTPStatusCode"]
         if http_status != 201 and http_status != 200:
             self._log.error(
-                f"Unexpected response from create_dataset request: {http_status} "
+                f"Unexpected response from create_dataset request: "
+                f"data_set_id = {data_set_id}, http_status = {http_status}"
             )
             raise Exception(
                 f"Unexpected response from trying to create/update dataset : {json.dumps(response, indent=4)} "
             )
         else:
+            self._log.info(
+                f"Data set ({data_set_id}) created successfully: http_status = {http_status}"
+            )
+
             return DataSetResponse(response["Arn"], response["DataSetId"])
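
A note on the fixed delays in the delete-then-create flow above: the bare
time.sleep(3) assumes the QuickSight backend finishes a deletion within three
seconds. A more defensive variant would poll describe_template until the
service confirms the resource is gone. The sketch below is illustrative only
and is not part of the patch series; the helper name wait_for_template_deletion
and its timeout default are assumptions, not existing code.

    import time

    def wait_for_template_deletion(
        qs_client, aws_account_id: str, template_id: str, timeout: float = 30.0
    ) -> None:
        # Poll until describe_template raises ResourceNotFoundException,
        # which confirms the backend deletion has actually completed.
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            try:
                qs_client.describe_template(
                    AwsAccountId=aws_account_id, TemplateId=template_id
                )
            except qs_client.exceptions.ResourceNotFoundException:
                return
            time.sleep(1)
        raise TimeoutError(f"template {template_id} still present after {timeout}s")

The same pattern would apply to data sets, pairing delete_data_set with
describe_data_set.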
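
Likewise, with the --aws-profile option removed from the CLI, profile
selection falls back to boto3's standard credential chain. A minimal usage
sketch of the resulting behavior, assuming a profile named
"my-quicksight-profile" exists in the local AWS config (the profile name is a
placeholder):

    import os

    import boto3

    # The default session reads AWS_PROFILE from the environment, so callers
    # select a named profile without a dedicated command line flag.
    os.environ["AWS_PROFILE"] = "my-quicksight-profile"

    boto3.setup_default_session()
    qs_client = boto3.client("quicksight")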