diff --git a/.travis.yml b/.travis.yml index b216f90..bf9b763 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,11 +1,12 @@ --- # Add additional stages in the order of execution here, and then under the job:include: key stages: - - "lint" - - "test" - - "test-without-ansible" - -if: "type IN (pull_request)" # Add in "branch" as an option if desired for branch testing as well + - name: "lint" + - name: "test" + - name: "deploy-github" + if: "tag IS present" + - name: "deploy-pypi" + if: "tag IS present" language: "python" python: @@ -19,31 +20,50 @@ services: # Env, before_script, and script for test stage env: matrix: - - "ANSIBLE_VER=2.8.18" - - "ANSIBLE_VER=2.9.17" - - "ANSIBLE_VER=2.10.5" + - "ANSIBLE_VER=2.8.20 ANSIBLE_PACKAGE=ansible" + - "ANSIBLE_VER=2.9.20 ANSIBLE_PACKAGE=ansible" + - "ANSIBLE_VER=2.10.7 ANSIBLE_PACKAGE=ansible" + - "ANSIBLE_VER=2.10.8 ANSIBLE_PACKAGE=ansible-base" before_script: - "pip install invoke toml" script: - - "invoke build --nocache" + - "invoke build --no-cache" - "invoke pytest" + - "invoke pytest-without-ansible" jobs: include: - stage: "lint" before_script: - "pip install invoke toml" - - "invoke build --nocache" + - "invoke build --no-cache" script: - "invoke black" - - "invoke bandit" # Bandit fails to function on > Py3.8 https://github.com/PyCQA/bandit/issues/639 + - "invoke bandit" - "invoke pydocstyle" - "invoke flake8" - "invoke yamllint" - "invoke pylint" - - stage: "test-without-ansible" + - stage: "deploy-github" before_script: - - "pip install invoke toml" - - "invoke build --without-ansible --nocache" - script: "invoke pytest-without-ansible" + - "pip install poetry" + script: + - "poetry version $TRAVIS_TAG" + - "poetry build" + deploy: + provider: "releases" + api_key: "$GITHUB_AUTH_TOKEN" + file_glob: true + file: "dist/*" + skip_cleanup: true + "on": + all_branches: true + + - stage: "deploy-pypi" + before_script: + - "pip install poetry" + script: + - "poetry version $TRAVIS_TAG" + - "poetry config pypi-token.pypi $PYPI_TOKEN" + - "poetry publish --build" diff --git a/CHANGELOG.md b/CHANGELOG.md index 4fe38f6..bf76328 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,20 @@ # Changelog +## v1.1.0 - 2021-05-25 + +### Adds + +- [Custom Validators](docs/custom_validators.md) +- [Automatic mapping of schemas to data files](docs/mapping_data_files_to_schemas.md) +- Automatic implementation of draft7 format checker to support [IPv4 and IPv6 format declarations](https://json-schema.org/understanding-json-schema/reference/string.html#id12) in a JSON Schema definition [#94](https://github.com/networktocode/schema-enforcer/issues/94) + +### Changes + +- Removes Ansible as a mandatory dependency [#90](https://github.com/networktocode/schema-enforcer/issues/90) +- `docs/mapping_schemas.md` renamed to `docs/mapping_data_files_to_schemas.md` +- Simplifies the invoke tasks used for development +- Schema enforcer now exits if an invalid schema is found while loading schemas [#99](https://github.com/networktocode/schema-enforcer/issues/99) + ## v1.0.0 - 2021-01-26 Schema Enforcer Initial Release diff --git a/Dockerfile b/Dockerfile index eccfcd1..cf61499 100644 --- a/Dockerfile +++ b/Dockerfile @@ -6,15 +6,17 @@ RUN pip install --upgrade pip \ && pip install poetry WORKDIR /local -COPY pyproject.toml /local - -ARG ANSIBLE_VER="ignore" +# Poetry fails install without README.md being copied. 
+COPY pyproject.toml poetry.lock README.md /local/ +COPY schema_enforcer /local/schema_enforcer RUN poetry config virtualenvs.create false \ - && poetry install --no-interaction --no-ansi \ - # If ANSIBLE_VER is set (not default), uninstall the ansible version poetry installed and install the declared ansible version. - && if [ ! "$ANSIBLE_VER" = "ignore" ]; then pip uninstall -yq ansible ansible-base && pip install ansible==$ANSIBLE_VER; fi - -FROM base as without_ansible + && poetry install --no-interaction --no-ansi -RUN pip uninstall -yq ansible ansible-base +# ----------------------------------------------------------------------------- +# Defines stage with ansible installed +# ----------------------------------------------------------------------------- +FROM base as with_ansible +ARG ANSIBLE_PACKAGE +ARG ANSIBLE_VER +RUN pip install $ANSIBLE_PACKAGE==$ANSIBLE_VER diff --git a/README.md b/README.md index 8964887..3523d40 100755 --- a/README.md +++ b/README.md @@ -29,7 +29,7 @@ Schema Enforcer requires that two different elements be defined by the user: - Schema Definition Files: These are files which define the schema to which a given set of data should adhere. - Structured Data Files: These are files which contain data that should adhere to the schema defined in one (or multiple) of the schema definition files. -> Note: Data which needs to be validated against a schema definition can come in the form of Structured Data Files or Ansible host vars. In the interest of brevity and simplicity, this README.md contains discussion only of Structured Data Files -- for more information on how to use `schema-enforcer` with ansible host vars, see [the ansible_command README](docs/ansible_command.md) +> Note: Data which needs to be validated against a schema definition can come in the form of Structured Data Files or Ansible host vars. Ansible is not installed by default when schema-enforcer is installed. In order to use Ansible features, ansible must either already be available in the environment or must be declared as an optional dependency when schema-enforcer is installed. In the interest of brevity and simplicity, this README.md contains discussion only of Structured Data Files -- for more information on how to use `schema-enforcer` with ansible host vars, see [the ansible_command README](docs/ansible_command.md) When `schema-enforcer` runs, it assumes a directory hierarchy which should be in place from the folder in which the tool is run. @@ -121,7 +121,7 @@ To run the schema validations, the command `schema-enforcer validate` can be run ```shell bash$ schema-enforcer validate -schema-enforcer validate +schema-enforcer validate ALL SCHEMA VALIDATION CHECKS PASSED ``` @@ -140,14 +140,14 @@ If we modify one of the addresses in the `chi-beijing-rt1/dns.yml` file so that ```yaml bash$ cat chi-beijing-rt1/dns.yml -# jsonschema: schemas/dns_servers +# jsonschema: schemas/dns_servers --- dns_servers: - address: true - address: "10.2.2.2" ``` ```shell -bash$ test-schema validate +bash$ schema-enforcer validate FAIL | [ERROR] True is not of type 'string' [FILE] ./chi-beijing-rt1/dns.yml [PROPERTY] dns_servers:0:address bash$ echo $? 1 @@ -160,7 +160,7 @@ When a structured data file fails schema validation, `schema-enforcer` exits wit Schema enforcer will work with default settings, however, a `pyproject.toml` file can be placed at the root of the path in which `schema-enforcer` is run in order to override default settings or declare configuration for more advanced features. 
Inside of this `pyproject.toml` file, `tool.schema_enforcer` sections can be used to declare settings for schema enforcer. Take for example the `pyproject.toml` file in example 2. ```shell -bash$ cd examples/example2 && tree -L 2 +bash$ cd examples/example2 && tree -L 2 . ├── README.md ├── hostvars @@ -194,7 +194,8 @@ bash$ cat pyproject.toml Detailed documentation can be found in the README.md files inside of the `docs/` directory. - ["Introducing Schema Enforcer" blog post](https://blog.networktocode.com/post/introducing_schema_enforcer/) - [Using a pyproject.toml file for configuration](docs/configuration.md) +- [Mapping Structured Data Files to Schema Files](docs/mapping_data_files_to_schemas.md) - [The `ansible` command](docs/ansible_command.md) - [The `validate` command](docs/validate_command.md) -- [Mapping Structured Data Files to Schema Files](docs/mapping_schemas.md) - [The `schema` command](docs/schema_command.md) +- [Implementing custom validators](docs/custom_validators.md) diff --git a/docs/ansible_command.md b/docs/ansible_command.md index 8c44a99..d739021 100644 --- a/docs/ansible_command.md +++ b/docs/ansible_command.md @@ -9,6 +9,8 @@ The `ansible` command is used to check ansible inventory for adherence to a sche If all checks pass, `schema-enforcer` will inform the user that all tests have passed. +> NOTE | Schema enforcer does not come with ansible pre-installed; rather, it is an optional dependency. The user can install schema enforcer bundled with ansible using either `pip install schema-enforcer[ansible-base]` or `pip install schema-enforcer[ansible]`. Alternatively, if ansible is already installed inside of the active python environment, the ansible package which is already installed will be used. + ## How the inventory is loaded When the `schema-enforcer ansible` command is run, an ansible inventory is constructed. Each host's properties are extracted from the ansible inventory into a single data structure per host, then this data structure is validated against all applicable schemas. For instance, take a look at the following example: diff --git a/docs/configuration.md b/docs/configuration.md index d94cd06..67d9af5 100644 --- a/docs/configuration.md +++ b/docs/configuration.md @@ -26,8 +26,7 @@ schema_file_exclude_filenames = [] data_file_search_directories = ["./"] data_file_extensions = [".json", ".yaml", ".yml"] data_file_exclude_filenames = [".yamllint.yml", ".travis.yml"] - -ansible_inventory = None +data_file_automap = true [tool.schema_enforcer.schema_mapping] ``` @@ -42,10 +41,12 @@ The table below enumerates each individual setting, it's expected type, it's def | definition_directory | string | "definitions" | The directory in which to search for schema definition references. These definitions can be referenced by the schema files in the "schema_directory". This directory should be nested in the "main_directory" | | schema_directory | string | "schemas" | The directory in which to search for schemas. 
This directory should be nested in the "main_directory" | | test_directory | string | "tests" | The directory in which to search for valid and invalid unit tests for schemas | +| validator_directory | string | "validators" | The directory in which schema-enforcer searches for custom validators | | schema_file_extensions | list | [".json", ".yaml", ".yml"] | The extensions to use when searching for schema definition files | | schema_file_exclude_filenames | list | [] | The list of filenames to exclude when searching for schema files in the `schema_directory` directory | | data_file_search_directories | list | ["./"] | The paths at which to start searching for files with structured data in them to validate against defined schemas. This path is relative to the directory in which `schema-enforcer` is executed. | | data_file_extensions | list | [".json", ".yaml", ".yml"] | The extensions to use when searching for structured data files | | data_file_exclude_filenames | list | [".yamllint.yml", ".travis.yml"] | The list of filenames to exclude when searching for structured data files | +| data_file_automap | bool | true | Whether or not to map top level keys in a data file to the top level properties defined in a schema | | ansible_inventory | str | None | The ansible inventory file to use when building an inventory of hosts against which to check for schema adherence | | schema_mapping | dict | {} | A mapping of structured data file names (keys) to lists of schema IDs (values) against which the data file should be checked for adherence | \ No newline at end of file diff --git a/docs/custom_validators.md b/docs/custom_validators.md new file mode 100644 index 0000000..96c3900 --- /dev/null +++ b/docs/custom_validators.md @@ -0,0 +1,158 @@ +# Implementing custom validators + +With custom validators, you can implement business logic in Python. Schema-enforcer will automatically +load your plugins from the `validator_directory` and run them against your host data. + +The validator plugin provides two base classes: BaseValidation and JmesPathModelValidation. The former can be used +when you want to implement all logic and the latter can be used as a shortcut for jmespath validation. + +## BaseValidation + +Use this class to implement arbitrary validation logic in Python. In order to work correctly, your Python script must meet +the following criteria: + +1. Exist in the `validator_directory` dir. +2. Include a subclass of the BaseValidation class to correctly register with schema-enforcer. +3. Ensure you call `super().__init__()` in your class `__init__` if you override. +4. Provide a class method in your subclass with the following signature: +`def validate(data: dict, strict: bool):` + + * Data is a dictionary of variables on a per-host basis. + * Strict is set to true when the strict flag is set via the CLI. You can use this to offer strict validation behavior + or ignore it if not needed. + +The name of your class will be used as the schema-id for mapping purposes. You can override the default schema ID +by providing a class-level `id` variable. + +Helper functions are provided to add pass/fail results: + +``` +def add_validation_error(self, message: str, **kwargs): + """Add validator error to results. + Args: + message (str): error message + kwargs (optional): additional arguments to add to ValidationResult when required + """ + +def add_validation_pass(self, **kwargs): + """Add validator pass to results. 
+ Args: + kwargs (optional): additional arguments to add to ValidationResult when required + """ +``` +In most cases, you will not need to provide kwargs. However, if you find a use case that requires updating other fields +in the ValidationResult, you can send the key/value pairs to update the result directly. This is for advanced users only. + +## JmesPathModelValidation + +Use this class for basic validation using [jmespath](https://jmespath.org/) expressions to query specific values in your data. In order to work correctly, your Python script must meet +the following criteria: + +1. Exist in the `validator_directory` dir. +2. Include a subclass of the JmesPathModelValidation class to correctly register with schema-enforcer. +3. Provide the following class level variables: + + * `top_level_properties`: Field for mapping of validator to data + * `id`: Schema ID to use for reporting purposes (optional - defaults to class name) + * `left`: Jmespath expression to query your host data + * `right`: Value or a compiled jmespath expression + * `operator`: Operator to use for comparison between left and right hand side of expression + * `error`: Message to report when validation fails + +### Supported operators: + +The class provides the following operators for basic use cases: + +``` +"gt": int(left) > int(right), +"gte": int(left) >= int(right), +"eq": left == right, +"lt": int(left) < int(right), +"lte": int(left) <= int(right), +"contains": right in left, +``` + +If you require additional logic or need to compare other types, use the BaseValidation class and create your own validate method. + +### Examples: + +#### Basic +``` +from schema_enforcer.schemas.validator import JmesPathModelValidation + +class CheckInterface(JmesPathModelValidation): # pylint: disable=too-few-public-methods + top_level_properties = ["interfaces"] + id = "CheckInterface" # pylint: disable=invalid-name + left = "interfaces.*[@.type=='core'][] | length([?@])" + right = 2 + operator = "gte" + error = "Less than two core interfaces" +``` + +#### With compiled jmespath expression +``` +import jmespath +from schema_enforcer.schemas.validator import JmesPathModelValidation + + +class CheckInterfaceIPv4(JmesPathModelValidation): # pylint: disable=too-few-public-methods + top_level_properties = ["interfaces"] + id = "CheckInterfaceIPv4" # pylint: disable=invalid-name + left = "interfaces.*[@.type=='core'][] | length([?@])" + right = jmespath.compile("interfaces.* | length([?@.type=='core'][].ipv4)") + operator = "eq" + error = "All core interfaces do not have IPv4 addresses" +``` + +## Running validators + +Custom validators are run with `schema-enforcer validate` and `schema-enforcer ansible` commands. + +You map validators to keys in your data with `top_level_properties` in your subclass or with `schema_enforcer_schema_ids` +in your data. Schema-enforcer uses the same process to map custom validators and schemas. Refer to the "Mapping Schemas" documentation +for more details. 
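### Example - BaseValidation

The criteria for a `BaseValidation` subclass are listed above but not shown end to end. The snippet below is an illustrative sketch only: it assumes `BaseValidation` is importable from `schema_enforcer.schemas.validator` (the same module the `JmesPathModelValidation` examples import from), uses the `add_validation_error`/`add_validation_pass` helpers documented earlier, and includes `self` in `validate` so those helpers can be called. The hostname-length rule and the `CheckHostname` name are made up for the example.

```
from schema_enforcer.schemas.validator import BaseValidation


class CheckHostname(BaseValidation):  # pylint: disable=too-few-public-methods
    """Illustrative check: hostnames must be 16 characters or fewer."""

    id = "CheckHostname"  # pylint: disable=invalid-name

    def validate(self, data: dict, strict: bool):
        """Validate a single host's data structure."""
        hostname = data.get("hostname", "")
        if len(hostname) > 16:
            self.add_validation_error(f"hostname '{hostname}' is longer than 16 characters")
        else:
            self.add_validation_pass()
```

Because this sketch does not set `top_level_properties`, it would be mapped to data by its schema ID (`CheckHostname`), for example via `schema_enforcer_schema_ids` as shown in the manual mapping example below.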
+ +### Example - top_level_properties + +The CheckInterface validator has a top_level_properties of "interfaces": + +``` +class CheckInterface(JmesPathModelValidation): # pylint: disable=too-few-public-methods + top_level_properties = ["interfaces"] +``` + +With automapping enabled, this validator will apply to any host with a top-level `interfaces` key in the Ansible host_vars data: + +``` +--- +hostname: "az-phx-pe01" +pair_rtr: "az-phx-pe02" +interfaces: + MgmtEth0/0/CPU0/0: + ipv4: "172.16.1.1" + Loopback0: + ipv4: "192.168.1.1" + ipv6: "2001:db8:1::1" + GigabitEthernet0/0/0/0: + ipv4: "10.1.0.1" + ipv6: "2001:db8::" + peer: "az-phx-pe02" + peer_int: "GigabitEthernet0/0/0/0" + type: "core" + GigabitEthernet0/0/0/1: + ipv4: "10.1.0.37" + ipv6: "2001:db8::12" + peer: "co-den-p01" + peer_int: "GigabitEthernet0/0/0/2" + type: "core" +``` + +### Example - manual mapping + +Alternatively, you can manually map a validator in your Ansible host vars or other data files. + +``` +schema_enforcer_automap_default: false +schema_enforcer_schema_ids: + - "CheckInterface" +``` diff --git a/docs/mapping_data_files_to_schemas.md b/docs/mapping_data_files_to_schemas.md new file mode 100644 index 0000000..6f2be9a --- /dev/null +++ b/docs/mapping_data_files_to_schemas.md @@ -0,0 +1,223 @@ +# Mapping Schemas + +## Overview +Each schema must define an `$id` property. This is a top level key in the schema definition file. Its value is a string which uniquely identifies the schema. + +In order to validate structured data files against schema definitions, `schema-enforcer` must have a way of mapping structured data files to the schema ID(s) of the schema definition(s) they should adhere to. This is done in one of several ways: + +1) If any of the top level keys in a given data file match any top level properties defined in a schema definition, the data will be automatically mapped to the corresponding schema. +2) The `pyproject.toml` file can map structured data filenames to the ID of the schema(s) to which they should adhere. +3) Any file containing structured data can be decorated with a comment which instructs `schema-enforcer` to check the file for compliance with defined schema ID(s). + +By default, all methods will be used together. + +To check which structured data files will be examined by which schemas, the `schema-enforcer validate --show-checks` command can be run. 
+ +```bash +bash$ cd examples/example3/ +bash$ schema-enforcer validate --show-checks +Structured Data File Schema ID +-------------------------------------------------------------------------------- +Structured Data File Schema ID +-------------------------------------------------------------------------------- +./hostvars/chi-beijing-rt1/dns.yml ['schemas/dns_servers'] +./hostvars/chi-beijing-rt1/syslog.yml ['schemas/syslog_servers'] +./hostvars/eng-london-rt1/dns.yml ['schemas/dns_servers'] +./hostvars/eng-london-rt1/ntp.yml ['schemas/ntp'] +./hostvars/fail-tests/dns.yml ['schemas/dns_servers'] +./hostvars/fail-tests/ntp.yml ['schemas/ntp'] +./hostvars/ger-berlin-rt1/dns.yml ['schemas/dns_servers'] +./hostvars/mex-mxc-rt1/hostvars.yml ['schemas/syslog_servers', 'schemas/dns_servers'] +./hostvars/usa-lax-rt1/dns.yml ['schemas/dns_servers'] +./hostvars/usa-lax-rt1/syslog.yml ['schemas/syslog_servers'] +./hostvars/usa-nyc-rt1/dns.yml ['schemas/dns_servers'] +./hostvars/usa-nyc-rt1/syslog.yml ['schemas/syslog_servers'] +``` + +## Using automap to map schemas + +In the following data sample, there is a single top-level key defined: `dns_servers` + +```yaml +bash$ cat ./hostvars/chi-beijing-rt1/dns.yml +--- +dns_servers: + - address: "10.1.1.1" + - address: "10.2.2.2" +``` + +In the following schema definition, there is a top level property defined of the same name: `dns_servers` + +```yaml +bash$ cat ./schema/schemas/dns.yml +--- +$schema: "http://json-schema.org/draft-07/schema#" +$id: "schemas/dns_servers" +description: "DNS Server Configuration schema." +type: "object" +properties: + dns_servers: + type: "array" + items: + type: "object" + properties: + name: + type: "string" + address: + type: "string" + format: "ipv4" + vrf: + type: "string" + required: + - "address" + uniqueItems: true +required: + - "dns_servers" +``` + +By default, `schema-enforcer` constructs a list of all top-level keys defined in a data file, then searches for schema definitions that also define any of the same top-level properties. The ID of any matching schema will automatically be included in the list of schemas IDs to check the data against. + +With this mapping mechanism, data-to-schema mappings are identified automatically and you do not need to separately declare mappings. + +```bash +bash$ tree +. +├── hostvars +│ └── chi-beijing-rt1 +│ └── dns.yml +└── schema + └── schemas + └── dns.yml + +4 directories, 2 files +``` + +The output of the `schema-enforcer validate` command shows that `./hostvars/chi-beijing-rt1/dns.yml`, which contains the data per the example above, is indeed being mapped to the `schemas/dns_servers` schema ID, which is the schema above. + +```bash +bash$ schema-enforcer validate --show-checks +Structured Data File Schema ID +-------------------------------------------------------------------------------- +./hostvars/chi-beijing-rt1/dns.yml ['schemas/dns_servers'] +``` + +While automapping is the easiest mapping mechanism to get started with, it can be beneficial to turn it off in favor of using one of the mechanisms described below. To do so, the following configuration can be put in a `pyproject.toml` file at the root of the path in which the schema and data files exist. 
+ +```bash +bash $ cat pyproject.toml +[tool.schema_enforcer] + +data_file_automap = false +``` + +After toggling the `data_file_automap` setting to false, `schema-enforcer validate --show-checks` now shows that the data file located at `./hostvars/chi-beijing-rt1/dns.yml` will not be checked for adherence to the `schemas/dns_servers` schema. + +```bash +bash$ schema-enforcer validate --show-checks +Structured Data File Schema ID +-------------------------------------------------------------------------------- +./hostvars/chi-beijing-rt1/dns.yml [] +``` + +If multiple keys are defined in the data file and only one of them is defined in the schema, the data will still be checked against the schema to ensure it is schema valid. For instance, if the dns.yml data file above is updated so that it includes another key of ntp_servers, it will still be checked for adherence to the `schemas/dns_servers` schema even though no top level property called `dns_servers2` exists in the schema definition. + +```yaml +bash$ cat ./hostvars/chi-beijing-rt1/dns.yml +--- +dns_servers: + - address: "10.1.1.1" + - address: "10.2.2.2" +dns_servers2: + - address: "10.3.3.3" + - address: "10.4.4.4" +``` + +```bash +bash$ schema-enforcer validate --show-checks +Structured Data File Schema ID +-------------------------------------------------------------------------------- +./hostvars/chi-beijing-rt1/dns.yml ['schemas/dns_servers'] +``` + +Similarly, if another property which does not exist in the data file is added to the schema definition, the data in `./hostvars/chi-beijing-rt1/dns.yml` will still be checked against the schema. + +> Note, this behavior can cause issues if `additionalProperties: False` is set or if `required` is defined in the schema. In such cases it is best to use one of the other mechanisms for mapping data files to schema definitions. + +## Using the pyproject.toml file to map schemas + +In the pyproject.toml file, a `tool.schema_enforcer.schema_mapping` section can be defined which maps structured data files to schema IDs. + +```toml +[tool.schema_enforcer.schema_mapping] +'dns_v1.yml' = ['schemas/dns_servers'] +'dns_v2.yml' = ['schemas/dns_servers_v2'] +``` + +The values above are key/value pairs defined in TOML. The key is a string of the structured data filename, the value is a list of schema IDs. The schema_id must be defined in the schema definition file. The below text snippet from a YAML file shows the schema ID to which the structured data file `dns_v1.yml` above is being mapped. + +```yaml +--- +$schema: "http://json-schema.org/draft-07/schema#" +$id: "schemas/dns_servers" +``` + +> Note: Output truncated for brevity. + +If multiple schema IDs are defined in the list, the structured data file will be checked for adherence to all defined schema ids. + +## Using a decorator to map schemas + +A decorator can be used to map structured data files to the schemas they should be validated against. This can be done by adding a comment at the top of a YAML file which defines structured data in the form `# jsonschema: `. Multiple schemas can be defined here by separating schema IDs with a comma. 
+ +```yaml +# jsonschema: schemas/ntp,schemas/ntpv2 +--- +ntp_servers: + - address: "10.6.6.6" + name: "ntp1" + - address: "10.7.7.7" + name: "ntp1" +ntp_authentication: false +ntp_logging: true +``` + +```bash +bash$ schema-enforcer validate --show-checks + +Instance File Schema +-------------------------------------------------------------------------------- +./hostvars/eng-london-rt1/ntp.yml ['schemas/ntp', 'schemas/ntpv2'] +``` + +> Note: This option only works for structured data files defined in YAML. This is because inline coments are not supported by JSON. + +## Multiple Definitions + +In the event that multiple mappings of different types exist, `schema-enforcer` will check the structured data files for adherence to all mapped schema IDs. In to following case, for instance, `schema-enforcer` will ensure that the structured data file `ntp.yml` adheres to both the `schemas/ntp` and `schemas/ntp2` schema definitions. + +```toml +[tool.schema_enforcer.schema_mapping] +'ntp.yml' = ['schemas/ntp2'] +``` + +```yaml +bash$ cat ntp.yml +# jsonschema: schemas/ntp +--- +ntp_servers: + - address: "10.6.6.6" + name: "ntp1" + - address: "10.7.7.7" + name: "ntp1" +ntp_authentication: false +ntp_logging: true +``` + +```bash +bash$ schema-enforcer validate --show-checks +Instance File Schema +-------------------------------------------------------------------------------- +./hostvars/eng-london-rt1/ntp.yml ['schemas/ntp2', 'schemas/ntp'] +``` + +> Note | If there were a schema definition with `ntp_servers` as its top level key, data would also be checked against that schema as well. \ No newline at end of file diff --git a/docs/mapping_schemas.md b/docs/mapping_schemas.md deleted file mode 100644 index 1cbc748..0000000 --- a/docs/mapping_schemas.md +++ /dev/null @@ -1,106 +0,0 @@ -# Mapping Schemas - -## Overview -In order to validate structured data files against schema definitions, `schema-enforcer` must have a way of mapping structured data files to the schema definition they should adhere to. This is done in one of two ways: - -1) The pyproject.toml file can map structured data filenames to the schema ID to which they should adhere. -2) Any file containing structured data can be decorated with a comment which instructs `schema-enforcer` to check the file for compliance against the a defined schema. - -To check which structured data files will be examined by which schemas, the `schema-enforcer validate --show-checks` command can be run. 
- -```cli -bash$ cd examples/example3/ -bash$ schema-enforcer validate --show-checks -Strucutred Data File Schema ID --------------------------------------------------------------------------------- -./hostvars/chi-beijing-rt1/dns.yml ['schemas/dns_servers'] -./hostvars/chi-beijing-rt1/syslog.yml ['schemas/syslog_servers'] -./hostvars/eng-london-rt1/dns.yml ['schemas/dns_servers'] -./hostvars/eng-london-rt1/ntp.yml ['schemas/ntp'] -./hostvars/fail-tests/dns.yml ['schemas/dns_servers'] -./hostvars/fail-tests/ntp.yml ['schemas/ntp'] -./hostvars/ger-berlin-rt1/dns.yml ['schemas/dns_servers'] -./hostvars/mex-mxc-rt1/dns.yml ['schemas/dns_servers'] -./hostvars/mex-mxc-rt1/syslog.yml ['schemas/syslog_servers'] -./hostvars/usa-lax-rt1/dns.yml ['schemas/dns_servers'] -./hostvars/usa-lax-rt1/syslog.yml ['schemas/syslog_servers'] -./hostvars/usa-nyc-rt1/dns.yml ['schemas/dns_servers'] -./hostvars/usa-nyc-rt1/syslog.yml ['schemas/syslog_servers'] -``` - -## Using the pyproject.toml file to map schemas - -In the pyproject.toml file, a `tools.schema_enforcer.schema_mapping` section can be defined which maps structured data files to schema IDs. - -```toml -[tools.schema_enforcer.schema_mapping] -'dns_v1.yml' = ['schemas/dns_servers'] -'dns_v2.yml' = ['schemas/dns_servers_v2'] -``` - -The values above are key/value pairs defined in TOML. The key is a string of the structured data filename, the value is a list of schema IDs. The schema_id must be defined in the schema definition file. The below text snippet from a YAML file shows the schema ID to which the structured data file `dns_v1.yml` above is being mapped. - -```yaml ---- -$schema: "http://json-schema.org/draft-07/schema#" -$id: "schemas/dns_servers" -``` - -> Note: Output truncated for brevity. - -If multiple schema IDs are defined in the list, the structured data file will be checked for adherence to all defined schema ids. - -## Using a decorator to map schemas - -A decorator can be used to map structured data files to the schemas they should be validated against. This can be done by adding a comment at the top of a YAML file which defines structured data in the form `# jsonschema: `. Multiple schemas can be defined here by separating schema IDs with a comma. - -```yaml -# jsonschema: schemas/ntp,schemas/ntpv2 ---- -ntp_servers: - - address: "10.6.6.6" - name: "ntp1" - - address: "10.7.7.7" - name: "ntp1" -ntp_authentication: false -ntp_logging: true -``` - -```cli -bash$ schema-enforcer validate --show-checks - -Instance File Schema --------------------------------------------------------------------------------- -./hostvars/eng-london-rt1/ntp.yml ['schemas/ntp', 'schemas/ntpv2'] -``` - -> Note: This option only works for structured data files defined in YAML. This is because inline coments are not supported by JSON. - -## Multiple Definitions - -In the event that a configuration section exists in the pyproject.toml file **and** a decorator exists in the structured data file, `schema-enforcer` will check the structured data files for adherenece to both schema IDs. In to following case, for instance, `schema-enforcer` will ensure that the structured data file `ntp.yml` adheres to both the `schemas/ntp` and `schemas/ntp2` schema definitions. 
- -```toml -[tools.schema_enforcer.schema_mapping] -'ntp.yml' = ['schemas/ntp2'] -``` - -```yaml -bash$ cat ntp.yml -# jsonschema: schemas/ntp ---- -ntp_servers: - - address: "10.6.6.6" - name: "ntp1" - - address: "10.7.7.7" - name: "ntp1" -ntp_authentication: false -ntp_logging: true -``` - -```cli -bash$ schema-enforcer validate --show-checks -Instance File Schema --------------------------------------------------------------------------------- -./hostvars/eng-london-rt1/ntp.yml ['schemas/ntp2', 'schemas/ntp'] -``` diff --git a/examples/ansible3/host_vars/az_phx_pe01/base.yml b/examples/ansible3/host_vars/az_phx_pe01/base.yml new file mode 100644 index 0000000..e8adf5e --- /dev/null +++ b/examples/ansible3/host_vars/az_phx_pe01/base.yml @@ -0,0 +1,22 @@ +--- +hostname: "az-phx-pe01" +pair_rtr: "az-phx-pe02" +upstreams: [] +interfaces: + MgmtEth0/0/CPU0/0: + ipv4: "172.16.1.1" + Loopback0: + ipv4: "192.168.1.1" + ipv6: "2001:db8:1::1" + GigabitEthernet0/0/0/0: + ipv4: "10.1.0.1" + ipv6: "2001:db8::" + peer: "az-phx-pe02" + peer_int: "GigabitEthernet0/0/0/0" + type: "core" + GigabitEthernet0/0/0/1: + ipv4: "10.1.0.37" + ipv6: "2001:db8::12" + peer: "co-den-p01" + peer_int: "GigabitEthernet0/0/0/2" + type: "core" diff --git a/examples/ansible3/host_vars/az_phx_pe02/base.yml b/examples/ansible3/host_vars/az_phx_pe02/base.yml new file mode 100644 index 0000000..3cfbd09 --- /dev/null +++ b/examples/ansible3/host_vars/az_phx_pe02/base.yml @@ -0,0 +1,22 @@ +--- +hostname: "az-phx-pe02" +pair_rtr: "az-phx-pe01" +upstreams: [] +interfaces: + MgmtEth0/0/CPU0/0: + ipv4: "172.16.1.2" + Loopback0: + ipv4: "192.168.1.2" + ipv6: "2001:db8:1::2" + GigabitEthernet0/0/0/0: + ipv4: "10.1.0.2" + ipv6: "2001:db8::1" + peer: "az-phx-pe01" + peer_int: "GigabitEthernet0/0/0/0" + type: "core" + GigabitEthernet0/0/0/1: + ipv4: "10.1.0.41" + ipv6: "2001:db8::14" + peer: "co-den-p02" + peer_int: "GigabitEthernet0/0/0/2" + type: "access" diff --git a/examples/ansible3/inventory.yml b/examples/ansible3/inventory.yml new file mode 100644 index 0000000..072655b --- /dev/null +++ b/examples/ansible3/inventory.yml @@ -0,0 +1,15 @@ +--- +all: + vars: + ansible_network_os: "iosxr" + ansible_user: "cisco" + ansible_password: "cisco" + ansible_connection: "netconf" + ansible_netconf_ssh_config: true + children: + pe_rtrs: + hosts: + az_phx_pe01: + ansible_host: "172.16.1.1" + az_phx_pe02: + ansible_host: "172.16.1.2" diff --git a/examples/ansible3/pyproject.toml b/examples/ansible3/pyproject.toml new file mode 100644 index 0000000..b4bd005 --- /dev/null +++ b/examples/ansible3/pyproject.toml @@ -0,0 +1,2 @@ +[tool.schema_enforcer] +ansible_inventory = "inventory.yml" \ No newline at end of file diff --git a/examples/ansible3/validators/check_interfaces.py b/examples/ansible3/validators/check_interfaces.py new file mode 100644 index 0000000..2c69fbf --- /dev/null +++ b/examples/ansible3/validators/check_interfaces.py @@ -0,0 +1,13 @@ +"""Example validator plugin.""" +from schema_enforcer.schemas.validator import JmesPathModelValidation + + +class CheckInterface(JmesPathModelValidation): # pylint: disable=too-few-public-methods + """Check that each device has more than one core uplink.""" + + top_level_properties = ["interfaces"] + id = "CheckInterface" # pylint: disable=invalid-name + left = "interfaces.*[@.type=='core'][] | length([?@])" + right = 2 + operator = "gte" + error = "Less than two core interfaces" diff --git a/examples/example3/hostvars/mex-mxc-rt1/dns.yml b/examples/example3/hostvars/mex-mxc-rt1/hostvars.yml 
similarity index 62% rename from examples/example3/hostvars/mex-mxc-rt1/dns.yml rename to examples/example3/hostvars/mex-mxc-rt1/hostvars.yml index d0ae645..ecf2031 100644 --- a/examples/example3/hostvars/mex-mxc-rt1/dns.yml +++ b/examples/example3/hostvars/mex-mxc-rt1/hostvars.yml @@ -2,3 +2,5 @@ dns_servers: - address: "10.12.12.12" - address: "10.13.13.13" +syslog_servers: + - address: "10.14.14.14" diff --git a/examples/example3/hostvars/mex-mxc-rt1/syslog.yml b/examples/example3/hostvars/mex-mxc-rt1/syslog.yml deleted file mode 100644 index eab3f85..0000000 --- a/examples/example3/hostvars/mex-mxc-rt1/syslog.yml +++ /dev/null @@ -1,3 +0,0 @@ ---- -syslog_servers: - - address: "10.14.14.14" diff --git a/poetry.lock b/poetry.lock index 6962904..b1721b9 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,20 +1,20 @@ [[package]] name = "ansible" -version = "2.10.5" +version = "2.10.7" description = "Radically simple IT automation" category = "main" -optional = false +optional = true python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*" [package.dependencies] -ansible-base = ">=2.10.4,<2.11" +ansible-base = ">=2.10.5,<2.11" [[package]] name = "ansible-base" -version = "2.10.5" +version = "2.10.8" description = "Radically simple IT automation" category = "main" -optional = false +optional = true python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*" [package.dependencies] @@ -33,17 +33,16 @@ python-versions = "*" [[package]] name = "astroid" -version = "2.4.2" +version = "2.5.3" description = "An abstract syntax tree for Python with inference support." category = "dev" optional = false -python-versions = ">=3.5" +python-versions = ">=3.6" [package.dependencies] -lazy-object-proxy = ">=1.4.0,<1.5.0" -six = ">=1.12,<2.0" +lazy-object-proxy = ">=1.4.0" typed-ast = {version = ">=1.4.0,<1.5", markers = "implementation_name == \"cpython\" and python_version < \"3.8\""} -wrapt = ">=1.11,<2.0" +wrapt = ">=1.11,<1.13" [[package]] name = "atomicwrites" @@ -112,10 +111,10 @@ python-versions = "*" [[package]] name = "cffi" -version = "1.14.4" +version = "1.14.5" description = "Foreign Function Interface for Python calling C code." category = "main" -optional = false +optional = true python-versions = "*" [package.dependencies] @@ -158,7 +157,7 @@ test = ["flake8 (3.7.8)", "hypothesis (3.55.3)"] [[package]] name = "coverage" -version = "5.3.1" +version = "5.5" description = "Code coverage measurement for Python" category = "dev" optional = false @@ -169,51 +168,51 @@ toml = ["toml"] [[package]] name = "cryptography" -version = "3.3.1" +version = "3.4.7" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
category = "main" -optional = false -python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*" +optional = true +python-versions = ">=3.6" [package.dependencies] cffi = ">=1.12" -six = ">=1.4.1" [package.extras] docs = ["sphinx (>=1.6.5,<1.8.0 || >1.8.0,<3.1.0 || >3.1.0,<3.1.1 || >3.1.1)", "sphinx-rtd-theme"] docstest = ["doc8", "pyenchant (>=1.6.11)", "twine (>=1.12.0)", "sphinxcontrib-spelling (>=4.0.1)"] pep8test = ["black", "flake8", "flake8-import-order", "pep8-naming"] +sdist = ["setuptools-rust (>=0.11.4)"] ssh = ["bcrypt (>=3.1.5)"] -test = ["pytest (>=3.6.0,<3.9.0 || >3.9.0,<3.9.1 || >3.9.1,<3.9.2 || >3.9.2)", "pretend", "iso8601", "pytz", "hypothesis (>=1.11.4,<3.79.2 || >3.79.2)"] +test = ["pytest (>=6.0)", "pytest-cov", "pytest-subtests", "pytest-xdist", "pretend", "iso8601", "pytz", "hypothesis (>=1.11.4,<3.79.2 || >3.79.2)"] [[package]] name = "flake8" -version = "3.8.4" +version = "3.9.1" description = "the modular source code checker: pep8 pyflakes and co" category = "dev" optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" [package.dependencies] importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} mccabe = ">=0.6.0,<0.7.0" -pycodestyle = ">=2.6.0a1,<2.7.0" -pyflakes = ">=2.2.0,<2.3.0" +pycodestyle = ">=2.7.0,<2.8.0" +pyflakes = ">=2.3.0,<2.4.0" [[package]] name = "gitdb" -version = "4.0.5" +version = "4.0.7" description = "Git Object Database" category = "dev" optional = false python-versions = ">=3.4" [package.dependencies] -smmap = ">=3.0.1,<4" +smmap = ">=3.0.1,<5" [[package]] name = "gitpython" -version = "3.1.12" +version = "3.1.14" description = "Python Git Library" category = "dev" optional = false @@ -232,7 +231,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "importlib-metadata" -version = "3.4.0" +version = "4.0.0" description = "Read metadata from Python packages" category = "main" optional = false @@ -244,7 +243,7 @@ zipp = ">=0.5" [package.extras] docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] -testing = ["pytest (>=3.5,<3.7.3 || >3.7.3)", "pytest-checkdocs (>=1.2.3)", "pytest-flake8", "pytest-cov", "pytest-enabler", "packaging", "pep517", "pyfakefs", "flufl.flake8", "pytest-black (>=0.3.7)", "pytest-mypy", "importlib-resources (>=1.3)"] +testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "packaging", "pep517", "pyfakefs", "flufl.flake8", "pytest-black (>=0.3.7)", "pytest-mypy", "importlib-resources (>=1.3)"] [[package]] name = "invoke" @@ -256,7 +255,7 @@ python-versions = "*" [[package]] name = "isort" -version = "5.7.0" +version = "5.8.0" description = "A Python utility / library to sort Python imports." category = "dev" optional = false @@ -269,7 +268,7 @@ colors = ["colorama (>=0.4.3,<0.5.0)"] [[package]] name = "jinja2" -version = "2.11.2" +version = "2.11.3" description = "A very fast and expressive template engine." 
category = "main" optional = false @@ -281,6 +280,22 @@ MarkupSafe = ">=0.23" [package.extras] i18n = ["Babel (>=0.8)"] +[[package]] +name = "jmespath" +version = "0.10.0" +description = "JSON Matching Expressions" +category = "main" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" + +[[package]] +name = "jsonpointer" +version = "2.1" +description = "Identify specific nodes in a JSON document (RFC 6901)" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + [[package]] name = "jsonref" version = "0.2" @@ -309,11 +324,11 @@ format_nongpl = ["idna", "jsonpointer (>1.13)", "webcolors", "rfc3986-validator [[package]] name = "lazy-object-proxy" -version = "1.4.3" +version = "1.6.0" description = "A fast and thorough lazy object proxy." category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" [[package]] name = "markupsafe" @@ -333,7 +348,7 @@ python-versions = "*" [[package]] name = "more-itertools" -version = "8.6.0" +version = "8.7.0" description = "More routines for operating on iterables, beyond itertools" category = "dev" optional = false @@ -341,7 +356,7 @@ python-versions = ">=3.5" [[package]] name = "packaging" -version = "20.8" +version = "20.9" description = "Core utilities for Python packages" category = "main" optional = false @@ -390,7 +405,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "pycodestyle" -version = "2.6.0" +version = "2.7.0" description = "Python style guide checker" category = "dev" optional = false @@ -401,21 +416,23 @@ name = "pycparser" version = "2.20" description = "C parser in Python" category = "main" -optional = false +optional = true python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "pydantic" -version = "1.7.3" +version = "1.8.1" description = "Data validation and settings management using python 3.6 type hinting" category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.6.1" + +[package.dependencies] +typing-extensions = ">=3.7.4.3" [package.extras] dotenv = ["python-dotenv (>=0.10.4)"] email = ["email-validator (>=1.0.3)"] -typing_extensions = ["typing-extensions (>=3.7.2)"] [[package]] name = "pydocstyle" @@ -430,7 +447,7 @@ snowballstemmer = "*" [[package]] name = "pyflakes" -version = "2.2.0" +version = "2.3.1" description = "passive checker of Python programs" category = "dev" optional = false @@ -438,7 +455,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "pygments" -version = "2.7.4" +version = "2.8.1" description = "Pygments is a syntax highlighting package written in Python." 
category = "main" optional = false @@ -446,19 +463,22 @@ python-versions = ">=3.5" [[package]] name = "pylint" -version = "2.6.0" +version = "2.7.4" description = "python code static checker" category = "dev" optional = false -python-versions = ">=3.5.*" +python-versions = "~=3.6" [package.dependencies] -astroid = ">=2.4.0,<=2.5" +astroid = ">=2.5.2,<2.7" colorama = {version = "*", markers = "sys_platform == \"win32\""} isort = ">=4.2.5,<6" mccabe = ">=0.6,<0.7" toml = ">=0.7.1" +[package.extras] +docs = ["sphinx (3.5.1)", "python-docs-theme (2020.12)"] + [[package]] name = "pyparsing" version = "2.4.7" @@ -508,7 +528,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" [[package]] name = "regex" -version = "2020.11.13" +version = "2021.4.4" description = "Alternative regular expression module, to replace re." category = "dev" optional = false @@ -548,9 +568,28 @@ six = "*" fixture = ["fixtures"] test = ["fixtures", "mock", "purl", "pytest", "sphinx", "testrepository (>=0.0.18)", "testtools"] +[[package]] +name = "rfc3339-validator" +version = "0.1.3" +description = "A pure python RFC3339 validator" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[package.dependencies] +six = "*" + +[[package]] +name = "rfc3987" +version = "1.3.8" +description = "Parsing and validation of URIs (RFC 3986) and IRIs (RFC 3987)" +category = "main" +optional = false +python-versions = "*" + [[package]] name = "rich" -version = "9.8.2" +version = "9.13.0" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" category = "main" optional = false @@ -567,14 +606,14 @@ jupyter = ["ipywidgets (>=7.5.1,<8.0.0)"] [[package]] name = "ruamel.yaml" -version = "0.16.12" +version = "0.16.13" description = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order" category = "main" optional = false python-versions = "*" [package.dependencies] -"ruamel.yaml.clib" = {version = ">=0.1.2", markers = "platform_python_implementation == \"CPython\" and python_version < \"3.9\""} +"ruamel.yaml.clib" = {version = ">=0.1.2", markers = "platform_python_implementation == \"CPython\" and python_version < \"3.10\""} [package.extras] docs = ["ryd"] @@ -598,11 +637,11 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" [[package]] name = "smmap" -version = "3.0.4" +version = "4.0.0" description = "A pure Python implementation of a sliding window memory map manager" category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=3.5" [[package]] name = "snowballstemmer" @@ -624,6 +663,14 @@ python-versions = ">=3.6" importlib-metadata = {version = ">=1.7.0", markers = "python_version < \"3.8\""} pbr = ">=2.0.0,<2.1.0 || >2.1.0" +[[package]] +name = "strict-rfc3339" +version = "0.7" +description = "Strict, simple, lightweight RFC3339 functions" +category = "main" +optional = false +python-versions = "*" + [[package]] name = "termcolor" version = "1.1.0" @@ -642,7 +689,7 @@ python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" [[package]] name = "typed-ast" -version = "1.4.2" +version = "1.4.3" description = "a fork of Python 2 and 3 ast modules with type comment support" category = "dev" optional = false @@ -658,16 +705,16 @@ python-versions = "*" [[package]] name = "urllib3" -version = "1.26.2" +version = "1.26.4" description = "HTTP library with thread-safe 
connection pooling, file post, and more." category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" [package.extras] -brotli = ["brotlipy (>=0.6.0)"] secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"] socks = ["PySocks (>=1.5.6,<1.5.7 || >1.5.7,<2.0)"] +brotli = ["brotlipy (>=0.6.0)"] [[package]] name = "wcwidth" @@ -687,11 +734,11 @@ python-versions = "*" [[package]] name = "yamllint" -version = "1.25.0" +version = "1.26.1" description = "A linter for YAML files." category = "dev" optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" +python-versions = ">=3.5.*" [package.dependencies] pathspec = ">=0.5.3" @@ -699,35 +746,39 @@ pyyaml = "*" [[package]] name = "zipp" -version = "3.4.0" +version = "3.4.1" description = "Backport of pathlib-compatible object wrapper for zip files" category = "main" optional = false python-versions = ">=3.6" [package.extras] -docs = ["sphinx", "jaraco.packaging (>=3.2)", "rst.linker (>=1.9)"] -testing = ["pytest (>=3.5,<3.7.3 || >3.7.3)", "pytest-checkdocs (>=1.2.3)", "pytest-flake8", "pytest-cov", "jaraco.test (>=3.2.0)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy"] +docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] +testing = ["pytest (>=4.6)", "pytest-checkdocs (>=1.2.3)", "pytest-flake8", "pytest-cov", "pytest-enabler", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy"] + +[extras] +ansible = ["ansible"] +ansible-base = ["ansible-base"] [metadata] lock-version = "1.1" python-versions = "^3.7" -content-hash = "e3b938e5ec45670a319811698f9d448d070508b91b89ac1b758a509ddc118d96" +content-hash = "b5223a0d3635a81efe93b2f51e921833dd2089183a576dba310f3ec4886cc7c8" [metadata.files] ansible = [ - {file = "ansible-2.10.5.tar.gz", hash = "sha256:9775229aae31336a624ca5afe5533fea5e49ef4daa96a96791dd9871b2d8b8d1"}, + {file = "ansible-2.10.7.tar.gz", hash = "sha256:9ff024500116d53c460cb09ea92e3c9404119f100d1d1ff0de69a9dafca561d5"}, ] ansible-base = [ - {file = "ansible-base-2.10.5.tar.gz", hash = "sha256:33ae323923b841f3d822f355380ce7c92610440362efeed67b4b39db41e555af"}, + {file = "ansible-base-2.10.8.tar.gz", hash = "sha256:f45df824051339d8bec32d7ab4e9e676498c05e2d9cfce6d54c9698a577e15e2"}, ] appdirs = [ {file = "appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"}, {file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"}, ] astroid = [ - {file = "astroid-2.4.2-py3-none-any.whl", hash = "sha256:bc58d83eb610252fd8de6363e39d4f1d0619c894b0ed24603b881c02e64c7386"}, - {file = "astroid-2.4.2.tar.gz", hash = "sha256:2f4078c2a41bf377eea06d71c9d2ba4eb8f6b1af2135bec27bbbb7d8f12bb703"}, + {file = "astroid-2.5.3-py3-none-any.whl", hash = "sha256:bea3f32799fbb8581f58431c12591bc20ce11cbc90ad82e2ea5717d94f2080d5"}, + {file = "astroid-2.5.3.tar.gz", hash = "sha256:ad63b8552c70939568966811a088ef0bc880f99a24a00834abd0e3681b514f91"}, ] atomicwrites = [ {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"}, @@ -750,40 +801,43 @@ certifi = [ {file = "certifi-2020.12.5.tar.gz", hash = "sha256:1a4995114262bffbc2413b159f2a1a480c969de6e6eb13ee966d470af86af59c"}, ] cffi = [ - {file = "cffi-1.14.4-cp27-cp27m-macosx_10_9_x86_64.whl", hash = 
"sha256:ebb253464a5d0482b191274f1c8bf00e33f7e0b9c66405fbffc61ed2c839c775"}, - {file = "cffi-1.14.4-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:2c24d61263f511551f740d1a065eb0212db1dbbbbd241db758f5244281590c06"}, - {file = "cffi-1.14.4-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9f7a31251289b2ab6d4012f6e83e58bc3b96bd151f5b5262467f4bb6b34a7c26"}, - {file = "cffi-1.14.4-cp27-cp27m-win32.whl", hash = "sha256:5cf4be6c304ad0b6602f5c4e90e2f59b47653ac1ed9c662ed379fe48a8f26b0c"}, - {file = "cffi-1.14.4-cp27-cp27m-win_amd64.whl", hash = "sha256:f60567825f791c6f8a592f3c6e3bd93dd2934e3f9dac189308426bd76b00ef3b"}, - {file = "cffi-1.14.4-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:c6332685306b6417a91b1ff9fae889b3ba65c2292d64bd9245c093b1b284809d"}, - {file = "cffi-1.14.4-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:d9efd8b7a3ef378dd61a1e77367f1924375befc2eba06168b6ebfa903a5e59ca"}, - {file = "cffi-1.14.4-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:51a8b381b16ddd370178a65360ebe15fbc1c71cf6f584613a7ea08bfad946698"}, - {file = "cffi-1.14.4-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:1d2c4994f515e5b485fd6d3a73d05526aa0fcf248eb135996b088d25dfa1865b"}, - {file = "cffi-1.14.4-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:af5c59122a011049aad5dd87424b8e65a80e4a6477419c0c1015f73fb5ea0293"}, - {file = "cffi-1.14.4-cp35-cp35m-win32.whl", hash = "sha256:594234691ac0e9b770aee9fcdb8fa02c22e43e5c619456efd0d6c2bf276f3eb2"}, - {file = "cffi-1.14.4-cp35-cp35m-win_amd64.whl", hash = "sha256:64081b3f8f6f3c3de6191ec89d7dc6c86a8a43911f7ecb422c60e90c70be41c7"}, - {file = "cffi-1.14.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f803eaa94c2fcda012c047e62bc7a51b0bdabda1cad7a92a522694ea2d76e49f"}, - {file = "cffi-1.14.4-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:105abaf8a6075dc96c1fe5ae7aae073f4696f2905fde6aeada4c9d2926752362"}, - {file = "cffi-1.14.4-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:0638c3ae1a0edfb77c6765d487fee624d2b1ee1bdfeffc1f0b58c64d149e7eec"}, - {file = "cffi-1.14.4-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:7c6b1dece89874d9541fc974917b631406233ea0440d0bdfbb8e03bf39a49b3b"}, - {file = "cffi-1.14.4-cp36-cp36m-win32.whl", hash = "sha256:155136b51fd733fa94e1c2ea5211dcd4c8879869008fc811648f16541bf99668"}, - {file = "cffi-1.14.4-cp36-cp36m-win_amd64.whl", hash = "sha256:6bc25fc545a6b3d57b5f8618e59fc13d3a3a68431e8ca5fd4c13241cd70d0009"}, - {file = "cffi-1.14.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a7711edca4dcef1a75257b50a2fbfe92a65187c47dab5a0f1b9b332c5919a3fb"}, - {file = "cffi-1.14.4-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:00e28066507bfc3fe865a31f325c8391a1ac2916219340f87dfad602c3e48e5d"}, - {file = "cffi-1.14.4-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:798caa2a2384b1cbe8a2a139d80734c9db54f9cc155c99d7cc92441a23871c03"}, - {file = "cffi-1.14.4-cp37-cp37m-win32.whl", hash = "sha256:00a1ba5e2e95684448de9b89888ccd02c98d512064b4cb987d48f4b40aa0421e"}, - {file = "cffi-1.14.4-cp37-cp37m-win_amd64.whl", hash = "sha256:9cc46bc107224ff5b6d04369e7c595acb700c3613ad7bcf2e2012f62ece80c35"}, - {file = "cffi-1.14.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:df5169c4396adc04f9b0a05f13c074df878b6052430e03f50e68adf3a57aa28d"}, - {file = "cffi-1.14.4-cp38-cp38-manylinux1_i686.whl", hash = "sha256:9ffb888f19d54a4d4dfd4b3f29bc2c16aa4972f1c2ab9c4ab09b8ab8685b9c2b"}, - {file = "cffi-1.14.4-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:8d6603078baf4e11edc4168a514c5ce5b3ba6e3e9c374298cb88437957960a53"}, - {file = 
"cffi-1.14.4-cp38-cp38-win32.whl", hash = "sha256:b4e248d1087abf9f4c10f3c398896c87ce82a9856494a7155823eb45a892395d"}, - {file = "cffi-1.14.4-cp38-cp38-win_amd64.whl", hash = "sha256:ec80dc47f54e6e9a78181ce05feb71a0353854cc26999db963695f950b5fb375"}, - {file = "cffi-1.14.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:840793c68105fe031f34d6a086eaea153a0cd5c491cde82a74b420edd0a2b909"}, - {file = "cffi-1.14.4-cp39-cp39-manylinux1_i686.whl", hash = "sha256:b18e0a9ef57d2b41f5c68beefa32317d286c3d6ac0484efd10d6e07491bb95dd"}, - {file = "cffi-1.14.4-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:045d792900a75e8b1e1b0ab6787dd733a8190ffcf80e8c8ceb2fb10a29ff238a"}, - {file = "cffi-1.14.4-cp39-cp39-win32.whl", hash = "sha256:ba4e9e0ae13fc41c6b23299545e5ef73055213e466bd107953e4a013a5ddd7e3"}, - {file = "cffi-1.14.4-cp39-cp39-win_amd64.whl", hash = "sha256:f032b34669220030f905152045dfa27741ce1a6db3324a5bc0b96b6c7420c87b"}, - {file = "cffi-1.14.4.tar.gz", hash = "sha256:1a465cbe98a7fd391d47dce4b8f7e5b921e6cd805ef421d04f5f66ba8f06086c"}, + {file = "cffi-1.14.5-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:bb89f306e5da99f4d922728ddcd6f7fcebb3241fc40edebcb7284d7514741991"}, + {file = "cffi-1.14.5-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:34eff4b97f3d982fb93e2831e6750127d1355a923ebaeeb565407b3d2f8d41a1"}, + {file = "cffi-1.14.5-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:99cd03ae7988a93dd00bcd9d0b75e1f6c426063d6f03d2f90b89e29b25b82dfa"}, + {file = "cffi-1.14.5-cp27-cp27m-win32.whl", hash = "sha256:65fa59693c62cf06e45ddbb822165394a288edce9e276647f0046e1ec26920f3"}, + {file = "cffi-1.14.5-cp27-cp27m-win_amd64.whl", hash = "sha256:51182f8927c5af975fece87b1b369f722c570fe169f9880764b1ee3bca8347b5"}, + {file = "cffi-1.14.5-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:43e0b9d9e2c9e5d152946b9c5fe062c151614b262fda2e7b201204de0b99e482"}, + {file = "cffi-1.14.5-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:cbde590d4faaa07c72bf979734738f328d239913ba3e043b1e98fe9a39f8b2b6"}, + {file = "cffi-1.14.5-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:5de7970188bb46b7bf9858eb6890aad302577a5f6f75091fd7cdd3ef13ef3045"}, + {file = "cffi-1.14.5-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:a465da611f6fa124963b91bf432d960a555563efe4ed1cc403ba5077b15370aa"}, + {file = "cffi-1.14.5-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:d42b11d692e11b6634f7613ad8df5d6d5f8875f5d48939520d351007b3c13406"}, + {file = "cffi-1.14.5-cp35-cp35m-win32.whl", hash = "sha256:72d8d3ef52c208ee1c7b2e341f7d71c6fd3157138abf1a95166e6165dd5d4369"}, + {file = "cffi-1.14.5-cp35-cp35m-win_amd64.whl", hash = "sha256:29314480e958fd8aab22e4a58b355b629c59bf5f2ac2492b61e3dc06d8c7a315"}, + {file = "cffi-1.14.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:3d3dd4c9e559eb172ecf00a2a7517e97d1e96de2a5e610bd9b68cea3925b4892"}, + {file = "cffi-1.14.5-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:48e1c69bbacfc3d932221851b39d49e81567a4d4aac3b21258d9c24578280058"}, + {file = "cffi-1.14.5-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:69e395c24fc60aad6bb4fa7e583698ea6cc684648e1ffb7fe85e3c1ca131a7d5"}, + {file = "cffi-1.14.5-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:9e93e79c2551ff263400e1e4be085a1210e12073a31c2011dbbda14bda0c6132"}, + {file = "cffi-1.14.5-cp36-cp36m-win32.whl", hash = "sha256:58e3f59d583d413809d60779492342801d6e82fefb89c86a38e040c16883be53"}, + {file = "cffi-1.14.5-cp36-cp36m-win_amd64.whl", hash = "sha256:005a36f41773e148deac64b08f233873a4d0c18b053d37da83f6af4d9087b813"}, + {file = 
"cffi-1.14.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2894f2df484ff56d717bead0a5c2abb6b9d2bf26d6960c4604d5c48bbc30ee73"}, + {file = "cffi-1.14.5-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:0857f0ae312d855239a55c81ef453ee8fd24136eaba8e87a2eceba644c0d4c06"}, + {file = "cffi-1.14.5-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:cd2868886d547469123fadc46eac7ea5253ea7fcb139f12e1dfc2bbd406427d1"}, + {file = "cffi-1.14.5-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:35f27e6eb43380fa080dccf676dece30bef72e4a67617ffda586641cd4508d49"}, + {file = "cffi-1.14.5-cp37-cp37m-win32.whl", hash = "sha256:9ff227395193126d82e60319a673a037d5de84633f11279e336f9c0f189ecc62"}, + {file = "cffi-1.14.5-cp37-cp37m-win_amd64.whl", hash = "sha256:9cf8022fb8d07a97c178b02327b284521c7708d7c71a9c9c355c178ac4bbd3d4"}, + {file = "cffi-1.14.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8b198cec6c72df5289c05b05b8b0969819783f9418e0409865dac47288d2a053"}, + {file = "cffi-1.14.5-cp38-cp38-manylinux1_i686.whl", hash = "sha256:ad17025d226ee5beec591b52800c11680fca3df50b8b29fe51d882576e039ee0"}, + {file = "cffi-1.14.5-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:6c97d7350133666fbb5cf4abdc1178c812cb205dc6f41d174a7b0f18fb93337e"}, + {file = "cffi-1.14.5-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:8ae6299f6c68de06f136f1f9e69458eae58f1dacf10af5c17353eae03aa0d827"}, + {file = "cffi-1.14.5-cp38-cp38-win32.whl", hash = "sha256:b85eb46a81787c50650f2392b9b4ef23e1f126313b9e0e9013b35c15e4288e2e"}, + {file = "cffi-1.14.5-cp38-cp38-win_amd64.whl", hash = "sha256:1f436816fc868b098b0d63b8920de7d208c90a67212546d02f84fe78a9c26396"}, + {file = "cffi-1.14.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1071534bbbf8cbb31b498d5d9db0f274f2f7a865adca4ae429e147ba40f73dea"}, + {file = "cffi-1.14.5-cp39-cp39-manylinux1_i686.whl", hash = "sha256:9de2e279153a443c656f2defd67769e6d1e4163952b3c622dcea5b08a6405322"}, + {file = "cffi-1.14.5-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:6e4714cc64f474e4d6e37cfff31a814b509a35cb17de4fb1999907575684479c"}, + {file = "cffi-1.14.5-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:158d0d15119b4b7ff6b926536763dc0714313aa59e320ddf787502c70c4d4bee"}, + {file = "cffi-1.14.5-cp39-cp39-win32.whl", hash = "sha256:afb29c1ba2e5a3736f1c301d9d0abe3ec8b86957d04ddfa9d7a6a42b9367e396"}, + {file = "cffi-1.14.5-cp39-cp39-win_amd64.whl", hash = "sha256:f2d45f97ab6bb54753eab54fffe75aaf3de4ff2341c9daee1987ee1837636f1d"}, + {file = "cffi-1.14.5.tar.gz", hash = "sha256:fd78e5fee591709f32ef6edb9a015b4aa1a5022598e36227500c8f4e02328d9c"}, ] chardet = [ {file = "chardet-4.0.0-py2.py3-none-any.whl", hash = "sha256:f864054d66fd9118f2e67044ac8981a54775ec5b67aed0441892edb553d21da5"}, @@ -801,91 +855,92 @@ commonmark = [ {file = "commonmark-0.9.1.tar.gz", hash = "sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60"}, ] coverage = [ - {file = "coverage-5.3.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:fabeeb121735d47d8eab8671b6b031ce08514c86b7ad8f7d5490a7b6dcd6267d"}, - {file = "coverage-5.3.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:7e4d159021c2029b958b2363abec4a11db0ce8cd43abb0d9ce44284cb97217e7"}, - {file = "coverage-5.3.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:378ac77af41350a8c6b8801a66021b52da8a05fd77e578b7380e876c0ce4f528"}, - {file = "coverage-5.3.1-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:e448f56cfeae7b1b3b5bcd99bb377cde7c4eb1970a525c770720a352bc4c8044"}, - {file = "coverage-5.3.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = 
"sha256:cc44e3545d908ecf3e5773266c487ad1877be718d9dc65fc7eb6e7d14960985b"}, - {file = "coverage-5.3.1-cp27-cp27m-win32.whl", hash = "sha256:08b3ba72bd981531fd557f67beee376d6700fba183b167857038997ba30dd297"}, - {file = "coverage-5.3.1-cp27-cp27m-win_amd64.whl", hash = "sha256:8dacc4073c359f40fcf73aede8428c35f84639baad7e1b46fce5ab7a8a7be4bb"}, - {file = "coverage-5.3.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:ee2f1d1c223c3d2c24e3afbb2dd38be3f03b1a8d6a83ee3d9eb8c36a52bee899"}, - {file = "coverage-5.3.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:9a9d4ff06804920388aab69c5ea8a77525cf165356db70131616acd269e19b36"}, - {file = "coverage-5.3.1-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:782a5c7df9f91979a7a21792e09b34a658058896628217ae6362088b123c8500"}, - {file = "coverage-5.3.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:fda29412a66099af6d6de0baa6bd7c52674de177ec2ad2630ca264142d69c6c7"}, - {file = "coverage-5.3.1-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:f2c6888eada180814b8583c3e793f3f343a692fc802546eed45f40a001b1169f"}, - {file = "coverage-5.3.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:8f33d1156241c43755137288dea619105477961cfa7e47f48dbf96bc2c30720b"}, - {file = "coverage-5.3.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:b239711e774c8eb910e9b1ac719f02f5ae4bf35fa0420f438cdc3a7e4e7dd6ec"}, - {file = "coverage-5.3.1-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:f54de00baf200b4539a5a092a759f000b5f45fd226d6d25a76b0dff71177a714"}, - {file = "coverage-5.3.1-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:be0416074d7f253865bb67630cf7210cbc14eb05f4099cc0f82430135aaa7a3b"}, - {file = "coverage-5.3.1-cp35-cp35m-win32.whl", hash = "sha256:c46643970dff9f5c976c6512fd35768c4a3819f01f61169d8cdac3f9290903b7"}, - {file = "coverage-5.3.1-cp35-cp35m-win_amd64.whl", hash = "sha256:9a4f66259bdd6964d8cf26142733c81fb562252db74ea367d9beb4f815478e72"}, - {file = "coverage-5.3.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:c6e5174f8ca585755988bc278c8bb5d02d9dc2e971591ef4a1baabdf2d99589b"}, - {file = "coverage-5.3.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:3911c2ef96e5ddc748a3c8b4702c61986628bb719b8378bf1e4a6184bbd48fe4"}, - {file = "coverage-5.3.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:c5ec71fd4a43b6d84ddb88c1df94572479d9a26ef3f150cef3dacefecf888105"}, - {file = "coverage-5.3.1-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:f51dbba78d68a44e99d484ca8c8f604f17e957c1ca09c3ebc2c7e3bbd9ba0448"}, - {file = "coverage-5.3.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:a2070c5affdb3a5e751f24208c5c4f3d5f008fa04d28731416e023c93b275277"}, - {file = "coverage-5.3.1-cp36-cp36m-win32.whl", hash = "sha256:535dc1e6e68fad5355f9984d5637c33badbdc987b0c0d303ee95a6c979c9516f"}, - {file = "coverage-5.3.1-cp36-cp36m-win_amd64.whl", hash = "sha256:a4857f7e2bc6921dbd487c5c88b84f5633de3e7d416c4dc0bb70256775551a6c"}, - {file = "coverage-5.3.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:fac3c432851038b3e6afe086f777732bcf7f6ebbfd90951fa04ee53db6d0bcdd"}, - {file = "coverage-5.3.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:cd556c79ad665faeae28020a0ab3bda6cd47d94bec48e36970719b0b86e4dcf4"}, - {file = "coverage-5.3.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:a66ca3bdf21c653e47f726ca57f46ba7fc1f260ad99ba783acc3e58e3ebdb9ff"}, - {file = "coverage-5.3.1-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:ab110c48bc3d97b4d19af41865e14531f300b482da21783fdaacd159251890e8"}, - {file = "coverage-5.3.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = 
"sha256:e52d3d95df81c8f6b2a1685aabffadf2d2d9ad97203a40f8d61e51b70f191e4e"}, - {file = "coverage-5.3.1-cp37-cp37m-win32.whl", hash = "sha256:fa10fee7e32213f5c7b0d6428ea92e3a3fdd6d725590238a3f92c0de1c78b9d2"}, - {file = "coverage-5.3.1-cp37-cp37m-win_amd64.whl", hash = "sha256:ce6f3a147b4b1a8b09aae48517ae91139b1b010c5f36423fa2b866a8b23df879"}, - {file = "coverage-5.3.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:93a280c9eb736a0dcca19296f3c30c720cb41a71b1f9e617f341f0a8e791a69b"}, - {file = "coverage-5.3.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:3102bb2c206700a7d28181dbe04d66b30780cde1d1c02c5f3c165cf3d2489497"}, - {file = "coverage-5.3.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:8ffd4b204d7de77b5dd558cdff986a8274796a1e57813ed005b33fd97e29f059"}, - {file = "coverage-5.3.1-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:a607ae05b6c96057ba86c811d9c43423f35e03874ffb03fbdcd45e0637e8b631"}, - {file = "coverage-5.3.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:3a3c3f8863255f3c31db3889f8055989527173ef6192a283eb6f4db3c579d830"}, - {file = "coverage-5.3.1-cp38-cp38-win32.whl", hash = "sha256:ff1330e8bc996570221b450e2d539134baa9465f5cb98aff0e0f73f34172e0ae"}, - {file = "coverage-5.3.1-cp38-cp38-win_amd64.whl", hash = "sha256:3498b27d8236057def41de3585f317abae235dd3a11d33e01736ffedb2ef8606"}, - {file = "coverage-5.3.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ceb499d2b3d1d7b7ba23abe8bf26df5f06ba8c71127f188333dddcf356b4b63f"}, - {file = "coverage-5.3.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:3b14b1da110ea50c8bcbadc3b82c3933974dbeea1832e814aab93ca1163cd4c1"}, - {file = "coverage-5.3.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:76b2775dda7e78680d688daabcb485dc87cf5e3184a0b3e012e1d40e38527cc8"}, - {file = "coverage-5.3.1-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:cef06fb382557f66d81d804230c11ab292d94b840b3cb7bf4450778377b592f4"}, - {file = "coverage-5.3.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:6f61319e33222591f885c598e3e24f6a4be3533c1d70c19e0dc59e83a71ce27d"}, - {file = "coverage-5.3.1-cp39-cp39-win32.whl", hash = "sha256:cc6f8246e74dd210d7e2b56c76ceaba1cc52b025cd75dbe96eb48791e0250e98"}, - {file = "coverage-5.3.1-cp39-cp39-win_amd64.whl", hash = "sha256:2757fa64e11ec12220968f65d086b7a29b6583d16e9a544c889b22ba98555ef1"}, - {file = "coverage-5.3.1-pp36-none-any.whl", hash = "sha256:723d22d324e7997a651478e9c5a3120a0ecbc9a7e94071f7e1954562a8806cf3"}, - {file = "coverage-5.3.1-pp37-none-any.whl", hash = "sha256:c89b558f8a9a5a6f2cfc923c304d49f0ce629c3bd85cb442ca258ec20366394c"}, - {file = "coverage-5.3.1.tar.gz", hash = "sha256:38f16b1317b8dd82df67ed5daa5f5e7c959e46579840d77a67a4ceb9cef0a50b"}, + {file = "coverage-5.5-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:b6d534e4b2ab35c9f93f46229363e17f63c53ad01330df9f2d6bd1187e5eaacf"}, + {file = "coverage-5.5-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:b7895207b4c843c76a25ab8c1e866261bcfe27bfaa20c192de5190121770672b"}, + {file = "coverage-5.5-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:c2723d347ab06e7ddad1a58b2a821218239249a9e4365eaff6649d31180c1669"}, + {file = "coverage-5.5-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:900fbf7759501bc7807fd6638c947d7a831fc9fdf742dc10f02956ff7220fa90"}, + {file = "coverage-5.5-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:004d1880bed2d97151facef49f08e255a20ceb6f9432df75f4eef018fdd5a78c"}, + {file = "coverage-5.5-cp27-cp27m-win32.whl", hash = "sha256:06191eb60f8d8a5bc046f3799f8a07a2d7aefb9504b0209aff0b47298333302a"}, + {file = 
"coverage-5.5-cp27-cp27m-win_amd64.whl", hash = "sha256:7501140f755b725495941b43347ba8a2777407fc7f250d4f5a7d2a1050ba8e82"}, + {file = "coverage-5.5-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:372da284cfd642d8e08ef606917846fa2ee350f64994bebfbd3afb0040436905"}, + {file = "coverage-5.5-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:8963a499849a1fc54b35b1c9f162f4108017b2e6db2c46c1bed93a72262ed083"}, + {file = "coverage-5.5-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:869a64f53488f40fa5b5b9dcb9e9b2962a66a87dab37790f3fcfb5144b996ef5"}, + {file = "coverage-5.5-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:4a7697d8cb0f27399b0e393c0b90f0f1e40c82023ea4d45d22bce7032a5d7b81"}, + {file = "coverage-5.5-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:8d0a0725ad7c1a0bcd8d1b437e191107d457e2ec1084b9f190630a4fb1af78e6"}, + {file = "coverage-5.5-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:51cb9476a3987c8967ebab3f0fe144819781fca264f57f89760037a2ea191cb0"}, + {file = "coverage-5.5-cp310-cp310-win_amd64.whl", hash = "sha256:c0891a6a97b09c1f3e073a890514d5012eb256845c451bd48f7968ef939bf4ae"}, + {file = "coverage-5.5-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:3487286bc29a5aa4b93a072e9592f22254291ce96a9fbc5251f566b6b7343cdb"}, + {file = "coverage-5.5-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:deee1077aae10d8fa88cb02c845cfba9b62c55e1183f52f6ae6a2df6a2187160"}, + {file = "coverage-5.5-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:f11642dddbb0253cc8853254301b51390ba0081750a8ac03f20ea8103f0c56b6"}, + {file = "coverage-5.5-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:6c90e11318f0d3c436a42409f2749ee1a115cd8b067d7f14c148f1ce5574d701"}, + {file = "coverage-5.5-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:30c77c1dc9f253283e34c27935fded5015f7d1abe83bc7821680ac444eaf7793"}, + {file = "coverage-5.5-cp35-cp35m-win32.whl", hash = "sha256:9a1ef3b66e38ef8618ce5fdc7bea3d9f45f3624e2a66295eea5e57966c85909e"}, + {file = "coverage-5.5-cp35-cp35m-win_amd64.whl", hash = "sha256:972c85d205b51e30e59525694670de6a8a89691186012535f9d7dbaa230e42c3"}, + {file = "coverage-5.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:af0e781009aaf59e25c5a678122391cb0f345ac0ec272c7961dc5455e1c40066"}, + {file = "coverage-5.5-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:74d881fc777ebb11c63736622b60cb9e4aee5cace591ce274fb69e582a12a61a"}, + {file = "coverage-5.5-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:92b017ce34b68a7d67bd6d117e6d443a9bf63a2ecf8567bb3d8c6c7bc5014465"}, + {file = "coverage-5.5-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:d636598c8305e1f90b439dbf4f66437de4a5e3c31fdf47ad29542478c8508bbb"}, + {file = "coverage-5.5-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:41179b8a845742d1eb60449bdb2992196e211341818565abded11cfa90efb821"}, + {file = "coverage-5.5-cp36-cp36m-win32.whl", hash = "sha256:040af6c32813fa3eae5305d53f18875bedd079960822ef8ec067a66dd8afcd45"}, + {file = "coverage-5.5-cp36-cp36m-win_amd64.whl", hash = "sha256:5fec2d43a2cc6965edc0bb9e83e1e4b557f76f843a77a2496cbe719583ce8184"}, + {file = "coverage-5.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:18ba8bbede96a2c3dde7b868de9dcbd55670690af0988713f0603f037848418a"}, + {file = "coverage-5.5-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:2910f4d36a6a9b4214bb7038d537f015346f413a975d57ca6b43bf23d6563b53"}, + {file = "coverage-5.5-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:f0b278ce10936db1a37e6954e15a3730bea96a0997c26d7fee88e6c396c2086d"}, + {file = 
"coverage-5.5-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:796c9c3c79747146ebd278dbe1e5c5c05dd6b10cc3bcb8389dfdf844f3ead638"}, + {file = "coverage-5.5-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:53194af30d5bad77fcba80e23a1441c71abfb3e01192034f8246e0d8f99528f3"}, + {file = "coverage-5.5-cp37-cp37m-win32.whl", hash = "sha256:184a47bbe0aa6400ed2d41d8e9ed868b8205046518c52464fde713ea06e3a74a"}, + {file = "coverage-5.5-cp37-cp37m-win_amd64.whl", hash = "sha256:2949cad1c5208b8298d5686d5a85b66aae46d73eec2c3e08c817dd3513e5848a"}, + {file = "coverage-5.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:217658ec7187497e3f3ebd901afdca1af062b42cfe3e0dafea4cced3983739f6"}, + {file = "coverage-5.5-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1aa846f56c3d49205c952d8318e76ccc2ae23303351d9270ab220004c580cfe2"}, + {file = "coverage-5.5-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:24d4a7de75446be83244eabbff746d66b9240ae020ced65d060815fac3423759"}, + {file = "coverage-5.5-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:d1f8bf7b90ba55699b3a5e44930e93ff0189aa27186e96071fac7dd0d06a1873"}, + {file = "coverage-5.5-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:970284a88b99673ccb2e4e334cfb38a10aab7cd44f7457564d11898a74b62d0a"}, + {file = "coverage-5.5-cp38-cp38-win32.whl", hash = "sha256:01d84219b5cdbfc8122223b39a954820929497a1cb1422824bb86b07b74594b6"}, + {file = "coverage-5.5-cp38-cp38-win_amd64.whl", hash = "sha256:2e0d881ad471768bf6e6c2bf905d183543f10098e3b3640fc029509530091502"}, + {file = "coverage-5.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d1f9ce122f83b2305592c11d64f181b87153fc2c2bbd3bb4a3dde8303cfb1a6b"}, + {file = "coverage-5.5-cp39-cp39-manylinux1_i686.whl", hash = "sha256:13c4ee887eca0f4c5a247b75398d4114c37882658300e153113dafb1d76de529"}, + {file = "coverage-5.5-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:52596d3d0e8bdf3af43db3e9ba8dcdaac724ba7b5ca3f6358529d56f7a166f8b"}, + {file = "coverage-5.5-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:2cafbbb3af0733db200c9b5f798d18953b1a304d3f86a938367de1567f4b5bff"}, + {file = "coverage-5.5-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:44d654437b8ddd9eee7d1eaee28b7219bec228520ff809af170488fd2fed3e2b"}, + {file = "coverage-5.5-cp39-cp39-win32.whl", hash = "sha256:d314ed732c25d29775e84a960c3c60808b682c08d86602ec2c3008e1202e3bb6"}, + {file = "coverage-5.5-cp39-cp39-win_amd64.whl", hash = "sha256:13034c4409db851670bc9acd836243aeee299949bd5673e11844befcb0149f03"}, + {file = "coverage-5.5-pp36-none-any.whl", hash = "sha256:f030f8873312a16414c0d8e1a1ddff2d3235655a2174e3648b4fa66b3f2f1079"}, + {file = "coverage-5.5-pp37-none-any.whl", hash = "sha256:2a3859cb82dcbda1cfd3e6f71c27081d18aa251d20a17d87d26d4cd216fb0af4"}, + {file = "coverage-5.5.tar.gz", hash = "sha256:ebe78fe9a0e874362175b02371bdfbee64d8edc42a044253ddf4ee7d3c15212c"}, ] cryptography = [ - {file = "cryptography-3.3.1-cp27-cp27m-macosx_10_10_x86_64.whl", hash = "sha256:c366df0401d1ec4e548bebe8f91d55ebcc0ec3137900d214dd7aac8427ef3030"}, - {file = "cryptography-3.3.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9f6b0492d111b43de5f70052e24c1f0951cb9e6022188ebcb1cc3a3d301469b0"}, - {file = "cryptography-3.3.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:a69bd3c68b98298f490e84519b954335154917eaab52cf582fa2c5c7efc6e812"}, - {file = "cryptography-3.3.1-cp27-cp27m-win32.whl", hash = "sha256:84ef7a0c10c24a7773163f917f1cb6b4444597efd505a8aed0a22e8c4780f27e"}, - {file = "cryptography-3.3.1-cp27-cp27m-win_amd64.whl", hash = 
"sha256:594a1db4511bc4d960571536abe21b4e5c3003e8750ab8365fafce71c5d86901"}, - {file = "cryptography-3.3.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:0003a52a123602e1acee177dc90dd201f9bb1e73f24a070db7d36c588e8f5c7d"}, - {file = "cryptography-3.3.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:83d9d2dfec70364a74f4e7c70ad04d3ca2e6a08b703606993407bf46b97868c5"}, - {file = "cryptography-3.3.1-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:dc42f645f8f3a489c3dd416730a514e7a91a59510ddaadc09d04224c098d3302"}, - {file = "cryptography-3.3.1-cp36-abi3-manylinux1_x86_64.whl", hash = "sha256:788a3c9942df5e4371c199d10383f44a105d67d401fb4304178020142f020244"}, - {file = "cryptography-3.3.1-cp36-abi3-manylinux2010_x86_64.whl", hash = "sha256:69e836c9e5ff4373ce6d3ab311c1a2eed274793083858d3cd4c7d12ce20d5f9c"}, - {file = "cryptography-3.3.1-cp36-abi3-manylinux2014_aarch64.whl", hash = "sha256:9e21301f7a1e7c03dbea73e8602905a4ebba641547a462b26dd03451e5769e7c"}, - {file = "cryptography-3.3.1-cp36-abi3-win32.whl", hash = "sha256:b4890d5fb9b7a23e3bf8abf5a8a7da8e228f1e97dc96b30b95685df840b6914a"}, - {file = "cryptography-3.3.1-cp36-abi3-win_amd64.whl", hash = "sha256:0e85aaae861d0485eb5a79d33226dd6248d2a9f133b81532c8f5aae37de10ff7"}, - {file = "cryptography-3.3.1.tar.gz", hash = "sha256:7e177e4bea2de937a584b13645cab32f25e3d96fc0bc4a4cf99c27dc77682be6"}, + {file = "cryptography-3.4.7-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:3d8427734c781ea5f1b41d6589c293089704d4759e34597dce91014ac125aad1"}, + {file = "cryptography-3.4.7-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:8e56e16617872b0957d1c9742a3f94b43533447fd78321514abbe7db216aa250"}, + {file = "cryptography-3.4.7-cp36-abi3-manylinux2010_x86_64.whl", hash = "sha256:37340614f8a5d2fb9aeea67fd159bfe4f5f4ed535b1090ce8ec428b2f15a11f2"}, + {file = "cryptography-3.4.7-cp36-abi3-manylinux2014_aarch64.whl", hash = "sha256:240f5c21aef0b73f40bb9f78d2caff73186700bf1bc6b94285699aff98cc16c6"}, + {file = "cryptography-3.4.7-cp36-abi3-manylinux2014_x86_64.whl", hash = "sha256:1e056c28420c072c5e3cb36e2b23ee55e260cb04eee08f702e0edfec3fb51959"}, + {file = "cryptography-3.4.7-cp36-abi3-win32.whl", hash = "sha256:0f1212a66329c80d68aeeb39b8a16d54ef57071bf22ff4e521657b27372e327d"}, + {file = "cryptography-3.4.7-cp36-abi3-win_amd64.whl", hash = "sha256:de4e5f7f68220d92b7637fc99847475b59154b7a1b3868fb7385337af54ac9ca"}, + {file = "cryptography-3.4.7-pp36-pypy36_pp73-manylinux2010_x86_64.whl", hash = "sha256:26965837447f9c82f1855e0bc8bc4fb910240b6e0d16a664bb722df3b5b06873"}, + {file = "cryptography-3.4.7-pp36-pypy36_pp73-manylinux2014_x86_64.whl", hash = "sha256:eb8cc2afe8b05acbd84a43905832ec78e7b3873fb124ca190f574dca7389a87d"}, + {file = "cryptography-3.4.7-pp37-pypy37_pp73-manylinux2010_x86_64.whl", hash = "sha256:7ec5d3b029f5fa2b179325908b9cd93db28ab7b85bb6c1db56b10e0b54235177"}, + {file = "cryptography-3.4.7-pp37-pypy37_pp73-manylinux2014_x86_64.whl", hash = "sha256:ee77aa129f481be46f8d92a1a7db57269a2f23052d5f2433b4621bb457081cc9"}, + {file = "cryptography-3.4.7.tar.gz", hash = "sha256:3d10de8116d25649631977cb37da6cbdd2d6fa0e0281d014a5b7d337255ca713"}, ] flake8 = [ - {file = "flake8-3.8.4-py2.py3-none-any.whl", hash = "sha256:749dbbd6bfd0cf1318af27bf97a14e28e5ff548ef8e5b1566ccfb25a11e7c839"}, - {file = "flake8-3.8.4.tar.gz", hash = "sha256:aadae8761ec651813c24be05c6f7b4680857ef6afaae4651a4eccaef97ce6c3b"}, + {file = "flake8-3.9.1-py2.py3-none-any.whl", hash = "sha256:3b9f848952dddccf635be78098ca75010f073bfe14d2c6bda867154bea728d2a"}, + {file = 
"flake8-3.9.1.tar.gz", hash = "sha256:1aa8990be1e689d96c745c5682b687ea49f2e05a443aff1f8251092b0014e378"}, ] gitdb = [ - {file = "gitdb-4.0.5-py3-none-any.whl", hash = "sha256:91f36bfb1ab7949b3b40e23736db18231bf7593edada2ba5c3a174a7b23657ac"}, - {file = "gitdb-4.0.5.tar.gz", hash = "sha256:c9e1f2d0db7ddb9a704c2a0217be31214e91a4fe1dea1efad19ae42ba0c285c9"}, + {file = "gitdb-4.0.7-py3-none-any.whl", hash = "sha256:6c4cc71933456991da20917998acbe6cf4fb41eeaab7d6d67fbc05ecd4c865b0"}, + {file = "gitdb-4.0.7.tar.gz", hash = "sha256:96bf5c08b157a666fec41129e6d327235284cca4c81e92109260f353ba138005"}, ] gitpython = [ - {file = "GitPython-3.1.12-py3-none-any.whl", hash = "sha256:867ec3dfb126aac0f8296b19fb63b8c4a399f32b4b6fafe84c4b10af5fa9f7b5"}, - {file = "GitPython-3.1.12.tar.gz", hash = "sha256:42dbefd8d9e2576c496ed0059f3103dcef7125b9ce16f9d5f9c834aed44a1dac"}, + {file = "GitPython-3.1.14-py3-none-any.whl", hash = "sha256:3283ae2fba31c913d857e12e5ba5f9a7772bbc064ae2bb09efafa71b0dd4939b"}, + {file = "GitPython-3.1.14.tar.gz", hash = "sha256:be27633e7509e58391f10207cd32b2a6cf5b908f92d9cd30da2e514e1137af61"}, ] idna = [ {file = "idna-2.10-py2.py3-none-any.whl", hash = "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0"}, {file = "idna-2.10.tar.gz", hash = "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6"}, ] importlib-metadata = [ - {file = "importlib_metadata-3.4.0-py3-none-any.whl", hash = "sha256:ace61d5fc652dc280e7b6b4ff732a9c2d40db2c0f92bc6cb74e07b73d53a1771"}, - {file = "importlib_metadata-3.4.0.tar.gz", hash = "sha256:fa5daa4477a7414ae34e95942e4dd07f62adf589143c875c133c1e53c4eff38d"}, + {file = "importlib_metadata-4.0.0-py3-none-any.whl", hash = "sha256:19192b88d959336bfa6bdaaaef99aeafec179eca19c47c804e555703ee5f07ef"}, + {file = "importlib_metadata-4.0.0.tar.gz", hash = "sha256:2e881981c9748d7282b374b68e759c87745c25427b67ecf0cc67fb6637a1bff9"}, ] invoke = [ {file = "invoke-1.5.0-py2-none-any.whl", hash = "sha256:da7c2d0be71be83ffd6337e078ef9643f41240024d6b2659e7b46e0b251e339f"}, @@ -893,12 +948,20 @@ invoke = [ {file = "invoke-1.5.0.tar.gz", hash = "sha256:f0c560075b5fb29ba14dad44a7185514e94970d1b9d57dcd3723bec5fed92650"}, ] isort = [ - {file = "isort-5.7.0-py3-none-any.whl", hash = "sha256:fff4f0c04e1825522ce6949973e83110a6e907750cd92d128b0d14aaaadbffdc"}, - {file = "isort-5.7.0.tar.gz", hash = "sha256:c729845434366216d320e936b8ad6f9d681aab72dc7cbc2d51bedc3582f3ad1e"}, + {file = "isort-5.8.0-py3-none-any.whl", hash = "sha256:2bb1680aad211e3c9944dbce1d4ba09a989f04e238296c87fe2139faa26d655d"}, + {file = "isort-5.8.0.tar.gz", hash = "sha256:0a943902919f65c5684ac4e0154b1ad4fac6dcaa5d9f3426b732f1c8b5419be6"}, ] jinja2 = [ - {file = "Jinja2-2.11.2-py2.py3-none-any.whl", hash = "sha256:f0a4641d3cf955324a89c04f3d94663aa4d638abe8f733ecd3582848e1c37035"}, - {file = "Jinja2-2.11.2.tar.gz", hash = "sha256:89aab215427ef59c34ad58735269eb58b1a5808103067f7bb9d5836c651b3bb0"}, + {file = "Jinja2-2.11.3-py2.py3-none-any.whl", hash = "sha256:03e47ad063331dd6a3f04a43eddca8a966a26ba0c5b7207a9a9e4e08f1b29419"}, + {file = "Jinja2-2.11.3.tar.gz", hash = "sha256:a6d58433de0ae800347cab1fa3043cebbabe8baa9d29e668f1c768cb87a333c6"}, +] +jmespath = [ + {file = "jmespath-0.10.0-py2.py3-none-any.whl", hash = "sha256:cdf6525904cc597730141d61b36f2e4b8ecc257c420fa2f4549bac2c2d0cb72f"}, + {file = "jmespath-0.10.0.tar.gz", hash = "sha256:b85d0567b8666149a93172712e68920734333c0ce7e89b78b3e987f71e5ed4f9"}, +] +jsonpointer = [ + {file = "jsonpointer-2.1-py2.py3-none-any.whl", hash = 
"sha256:150f80c5badd02c757da6644852f612f88e8b4bc2f9852dcbf557c8738919686"}, + {file = "jsonpointer-2.1.tar.gz", hash = "sha256:5a34b698db1eb79ceac454159d3f7c12a451a91f6334a4f638454327b7a89962"}, ] jsonref = [ {file = "jsonref-0.2-py3-none-any.whl", hash = "sha256:b1e82fa0b62e2c2796a13e5401fe51790b248f6d9bf9d7212a3e31a3501b291f"}, @@ -909,27 +972,28 @@ jsonschema = [ {file = "jsonschema-3.2.0.tar.gz", hash = "sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a"}, ] lazy-object-proxy = [ - {file = "lazy-object-proxy-1.4.3.tar.gz", hash = "sha256:f3900e8a5de27447acbf900b4750b0ddfd7ec1ea7fbaf11dfa911141bc522af0"}, - {file = "lazy_object_proxy-1.4.3-cp27-cp27m-macosx_10_13_x86_64.whl", hash = "sha256:a2238e9d1bb71a56cd710611a1614d1194dc10a175c1e08d75e1a7bcc250d442"}, - {file = "lazy_object_proxy-1.4.3-cp27-cp27m-win32.whl", hash = "sha256:efa1909120ce98bbb3777e8b6f92237f5d5c8ea6758efea36a473e1d38f7d3e4"}, - {file = "lazy_object_proxy-1.4.3-cp27-cp27m-win_amd64.whl", hash = "sha256:4677f594e474c91da97f489fea5b7daa17b5517190899cf213697e48d3902f5a"}, - {file = "lazy_object_proxy-1.4.3-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:0c4b206227a8097f05c4dbdd323c50edf81f15db3b8dc064d08c62d37e1a504d"}, - {file = "lazy_object_proxy-1.4.3-cp34-cp34m-manylinux1_x86_64.whl", hash = "sha256:d945239a5639b3ff35b70a88c5f2f491913eb94871780ebfabb2568bd58afc5a"}, - {file = "lazy_object_proxy-1.4.3-cp34-cp34m-win32.whl", hash = "sha256:9651375199045a358eb6741df3e02a651e0330be090b3bc79f6d0de31a80ec3e"}, - {file = "lazy_object_proxy-1.4.3-cp34-cp34m-win_amd64.whl", hash = "sha256:eba7011090323c1dadf18b3b689845fd96a61ba0a1dfbd7f24b921398affc357"}, - {file = "lazy_object_proxy-1.4.3-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:48dab84ebd4831077b150572aec802f303117c8cc5c871e182447281ebf3ac50"}, - {file = "lazy_object_proxy-1.4.3-cp35-cp35m-win32.whl", hash = "sha256:ca0a928a3ddbc5725be2dd1cf895ec0a254798915fb3a36af0964a0a4149e3db"}, - {file = "lazy_object_proxy-1.4.3-cp35-cp35m-win_amd64.whl", hash = "sha256:194d092e6f246b906e8f70884e620e459fc54db3259e60cf69a4d66c3fda3449"}, - {file = "lazy_object_proxy-1.4.3-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:97bb5884f6f1cdce0099f86b907aa41c970c3c672ac8b9c8352789e103cf3156"}, - {file = "lazy_object_proxy-1.4.3-cp36-cp36m-win32.whl", hash = "sha256:cb2c7c57005a6804ab66f106ceb8482da55f5314b7fcb06551db1edae4ad1531"}, - {file = "lazy_object_proxy-1.4.3-cp36-cp36m-win_amd64.whl", hash = "sha256:8d859b89baf8ef7f8bc6b00aa20316483d67f0b1cbf422f5b4dc56701c8f2ffb"}, - {file = "lazy_object_proxy-1.4.3-cp37-cp37m-macosx_10_13_x86_64.whl", hash = "sha256:1be7e4c9f96948003609aa6c974ae59830a6baecc5376c25c92d7d697e684c08"}, - {file = "lazy_object_proxy-1.4.3-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:d74bb8693bf9cf75ac3b47a54d716bbb1a92648d5f781fc799347cfc95952383"}, - {file = "lazy_object_proxy-1.4.3-cp37-cp37m-win32.whl", hash = "sha256:9b15f3f4c0f35727d3a0fba4b770b3c4ebbb1fa907dbcc046a1d2799f3edd142"}, - {file = "lazy_object_proxy-1.4.3-cp37-cp37m-win_amd64.whl", hash = "sha256:9254f4358b9b541e3441b007a0ea0764b9d056afdeafc1a5569eee1cc6c1b9ea"}, - {file = "lazy_object_proxy-1.4.3-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:a6ae12d08c0bf9909ce12385803a543bfe99b95fe01e752536a60af2b7797c62"}, - {file = "lazy_object_proxy-1.4.3-cp38-cp38-win32.whl", hash = "sha256:5541cada25cd173702dbd99f8e22434105456314462326f06dba3e180f203dfd"}, - {file = "lazy_object_proxy-1.4.3-cp38-cp38-win_amd64.whl", hash = 
"sha256:59f79fef100b09564bc2df42ea2d8d21a64fdcda64979c0fa3db7bdaabaf6239"}, + {file = "lazy-object-proxy-1.6.0.tar.gz", hash = "sha256:489000d368377571c6f982fba6497f2aa13c6d1facc40660963da62f5c379726"}, + {file = "lazy_object_proxy-1.6.0-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:c6938967f8528b3668622a9ed3b31d145fab161a32f5891ea7b84f6b790be05b"}, + {file = "lazy_object_proxy-1.6.0-cp27-cp27m-win32.whl", hash = "sha256:ebfd274dcd5133e0afae738e6d9da4323c3eb021b3e13052d8cbd0e457b1256e"}, + {file = "lazy_object_proxy-1.6.0-cp27-cp27m-win_amd64.whl", hash = "sha256:ed361bb83436f117f9917d282a456f9e5009ea12fd6de8742d1a4752c3017e93"}, + {file = "lazy_object_proxy-1.6.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:d900d949b707778696fdf01036f58c9876a0d8bfe116e8d220cfd4b15f14e741"}, + {file = "lazy_object_proxy-1.6.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:5743a5ab42ae40caa8421b320ebf3a998f89c85cdc8376d6b2e00bd12bd1b587"}, + {file = "lazy_object_proxy-1.6.0-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:bf34e368e8dd976423396555078def5cfc3039ebc6fc06d1ae2c5a65eebbcde4"}, + {file = "lazy_object_proxy-1.6.0-cp36-cp36m-win32.whl", hash = "sha256:b579f8acbf2bdd9ea200b1d5dea36abd93cabf56cf626ab9c744a432e15c815f"}, + {file = "lazy_object_proxy-1.6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:4f60460e9f1eb632584c9685bccea152f4ac2130e299784dbaf9fae9f49891b3"}, + {file = "lazy_object_proxy-1.6.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:d7124f52f3bd259f510651450e18e0fd081ed82f3c08541dffc7b94b883aa981"}, + {file = "lazy_object_proxy-1.6.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:22ddd618cefe54305df49e4c069fa65715be4ad0e78e8d252a33debf00f6ede2"}, + {file = "lazy_object_proxy-1.6.0-cp37-cp37m-win32.whl", hash = "sha256:9d397bf41caad3f489e10774667310d73cb9c4258e9aed94b9ec734b34b495fd"}, + {file = "lazy_object_proxy-1.6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:24a5045889cc2729033b3e604d496c2b6f588c754f7a62027ad4437a7ecc4837"}, + {file = "lazy_object_proxy-1.6.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:17e0967ba374fc24141738c69736da90e94419338fd4c7c7bef01ee26b339653"}, + {file = "lazy_object_proxy-1.6.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:410283732af311b51b837894fa2f24f2c0039aa7f220135192b38fcc42bd43d3"}, + {file = "lazy_object_proxy-1.6.0-cp38-cp38-win32.whl", hash = "sha256:85fb7608121fd5621cc4377a8961d0b32ccf84a7285b4f1d21988b2eae2868e8"}, + {file = "lazy_object_proxy-1.6.0-cp38-cp38-win_amd64.whl", hash = "sha256:d1c2676e3d840852a2de7c7d5d76407c772927addff8d742b9808fe0afccebdf"}, + {file = "lazy_object_proxy-1.6.0-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:b865b01a2e7f96db0c5d12cfea590f98d8c5ba64ad222300d93ce6ff9138bcad"}, + {file = "lazy_object_proxy-1.6.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:4732c765372bd78a2d6b2150a6e99d00a78ec963375f236979c0626b97ed8e43"}, + {file = "lazy_object_proxy-1.6.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:9698110e36e2df951c7c36b6729e96429c9c32b3331989ef19976592c5f3c77a"}, + {file = "lazy_object_proxy-1.6.0-cp39-cp39-win32.whl", hash = "sha256:1fee665d2638491f4d6e55bd483e15ef21f6c8c2095f235fef72601021e64f61"}, + {file = "lazy_object_proxy-1.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:f5144c75445ae3ca2057faac03fda5a902eff196702b0a24daf1d6ce0650514b"}, ] markupsafe = [ {file = "MarkupSafe-1.1.1-cp27-cp27m-macosx_10_6_intel.whl", hash = "sha256:09027a7803a62ca78792ad89403b1b7a73a01c8cb65909cd876f7fcebd79b161"}, @@ -971,12 +1035,12 @@ mccabe = [ {file = "mccabe-0.6.1.tar.gz", 
hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, ] more-itertools = [ - {file = "more-itertools-8.6.0.tar.gz", hash = "sha256:b3a9005928e5bed54076e6e549c792b306fddfe72b2d1d22dd63d42d5d3899cf"}, - {file = "more_itertools-8.6.0-py3-none-any.whl", hash = "sha256:8e1a2a43b2f2727425f2b5839587ae37093f19153dc26c0927d1048ff6557330"}, + {file = "more-itertools-8.7.0.tar.gz", hash = "sha256:c5d6da9ca3ff65220c3bfd2a8db06d698f05d4d2b9be57e1deb2be5a45019713"}, + {file = "more_itertools-8.7.0-py3-none-any.whl", hash = "sha256:5652a9ac72209ed7df8d9c15daf4e1aa0e3d2ccd3c87f8265a0673cd9cbc9ced"}, ] packaging = [ - {file = "packaging-20.8-py2.py3-none-any.whl", hash = "sha256:24e0da08660a87484d1602c30bb4902d74816b6985b93de36926f5bc95741858"}, - {file = "packaging-20.8.tar.gz", hash = "sha256:78598185a7008a470d64526a8059de9aaa449238f280fc9eb6b13ba6c4109093"}, + {file = "packaging-20.9-py2.py3-none-any.whl", hash = "sha256:67714da7f7bc052e064859c05c595155bd1ee9f69f76557e21f051443c20947a"}, + {file = "packaging-20.9.tar.gz", hash = "sha256:5b327ac1320dc863dca72f4514ecc086f31186744b84a230374cc1fd776feae5"}, ] pathspec = [ {file = "pathspec-0.8.1-py2.py3-none-any.whl", hash = "sha256:aa0cb481c4041bf52ffa7b0d8fa6cd3e88a2ca4879c533c9153882ee2556790d"}, @@ -995,52 +1059,52 @@ py = [ {file = "py-1.10.0.tar.gz", hash = "sha256:21b81bda15b66ef5e1a777a21c4dcd9c20ad3efd0b3f817e7a809035269e1bd3"}, ] pycodestyle = [ - {file = "pycodestyle-2.6.0-py2.py3-none-any.whl", hash = "sha256:2295e7b2f6b5bd100585ebcb1f616591b652db8a741695b3d8f5d28bdc934367"}, - {file = "pycodestyle-2.6.0.tar.gz", hash = "sha256:c58a7d2815e0e8d7972bf1803331fb0152f867bd89adf8a01dfd55085434192e"}, + {file = "pycodestyle-2.7.0-py2.py3-none-any.whl", hash = "sha256:514f76d918fcc0b55c6680472f0a37970994e07bbb80725808c17089be302068"}, + {file = "pycodestyle-2.7.0.tar.gz", hash = "sha256:c389c1d06bf7904078ca03399a4816f974a1d590090fecea0c63ec26ebaf1cef"}, ] pycparser = [ {file = "pycparser-2.20-py2.py3-none-any.whl", hash = "sha256:7582ad22678f0fcd81102833f60ef8d0e57288b6b5fb00323d101be910e35705"}, {file = "pycparser-2.20.tar.gz", hash = "sha256:2d475327684562c3a96cc71adf7dc8c4f0565175cf86b6d7a404ff4c771f15f0"}, ] pydantic = [ - {file = "pydantic-1.7.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:c59ea046aea25be14dc22d69c97bee629e6d48d2b2ecb724d7fe8806bf5f61cd"}, - {file = "pydantic-1.7.3-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:a4143c8d0c456a093387b96e0f5ee941a950992904d88bc816b4f0e72c9a0009"}, - {file = "pydantic-1.7.3-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:d8df4b9090b595511906fa48deda47af04e7d092318bfb291f4d45dfb6bb2127"}, - {file = "pydantic-1.7.3-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:514b473d264671a5c672dfb28bdfe1bf1afd390f6b206aa2ec9fed7fc592c48e"}, - {file = "pydantic-1.7.3-cp36-cp36m-win_amd64.whl", hash = "sha256:dba5c1f0a3aeea5083e75db9660935da90216f8a81b6d68e67f54e135ed5eb23"}, - {file = "pydantic-1.7.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:59e45f3b694b05a69032a0d603c32d453a23f0de80844fb14d55ab0c6c78ff2f"}, - {file = "pydantic-1.7.3-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:5b24e8a572e4b4c18f614004dda8c9f2c07328cb5b6e314d6e1bbd536cb1a6c1"}, - {file = "pydantic-1.7.3-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:b2b054d095b6431cdda2f852a6d2f0fdec77686b305c57961b4c5dd6d863bf3c"}, - {file = "pydantic-1.7.3-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:025bf13ce27990acc059d0c5be46f416fc9b293f45363b3d19855165fee1874f"}, - {file = 
"pydantic-1.7.3-cp37-cp37m-win_amd64.whl", hash = "sha256:6e3874aa7e8babd37b40c4504e3a94cc2023696ced5a0500949f3347664ff8e2"}, - {file = "pydantic-1.7.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e682f6442ebe4e50cb5e1cfde7dda6766fb586631c3e5569f6aa1951fd1a76ef"}, - {file = "pydantic-1.7.3-cp38-cp38-manylinux1_i686.whl", hash = "sha256:185e18134bec5ef43351149fe34fda4758e53d05bb8ea4d5928f0720997b79ef"}, - {file = "pydantic-1.7.3-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:f5b06f5099e163295b8ff5b1b71132ecf5866cc6e7f586d78d7d3fd6e8084608"}, - {file = "pydantic-1.7.3-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:24ca47365be2a5a3cc3f4a26dcc755bcdc9f0036f55dcedbd55663662ba145ec"}, - {file = "pydantic-1.7.3-cp38-cp38-win_amd64.whl", hash = "sha256:d1fe3f0df8ac0f3a9792666c69a7cd70530f329036426d06b4f899c025aca74e"}, - {file = "pydantic-1.7.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f6864844b039805add62ebe8a8c676286340ba0c6d043ae5dea24114b82a319e"}, - {file = "pydantic-1.7.3-cp39-cp39-manylinux1_i686.whl", hash = "sha256:ecb54491f98544c12c66ff3d15e701612fc388161fd455242447083350904730"}, - {file = "pydantic-1.7.3-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:ffd180ebd5dd2a9ac0da4e8b995c9c99e7c74c31f985ba090ee01d681b1c4b95"}, - {file = "pydantic-1.7.3-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:8d72e814c7821125b16f1553124d12faba88e85405b0864328899aceaad7282b"}, - {file = "pydantic-1.7.3-cp39-cp39-win_amd64.whl", hash = "sha256:475f2fa134cf272d6631072554f845d0630907fce053926ff634cc6bc45bf1af"}, - {file = "pydantic-1.7.3-py3-none-any.whl", hash = "sha256:38be427ea01a78206bcaf9a56f835784afcba9e5b88fbdce33bbbfbcd7841229"}, - {file = "pydantic-1.7.3.tar.gz", hash = "sha256:213125b7e9e64713d16d988d10997dabc6a1f73f3991e1ff8e35ebb1409c7dc9"}, + {file = "pydantic-1.8.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:0c40162796fc8d0aa744875b60e4dc36834db9f2a25dbf9ba9664b1915a23850"}, + {file = "pydantic-1.8.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:fff29fe54ec419338c522b908154a2efabeee4f483e48990f87e189661f31ce3"}, + {file = "pydantic-1.8.1-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:fbfb608febde1afd4743c6822c19060a8dbdd3eb30f98e36061ba4973308059e"}, + {file = "pydantic-1.8.1-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:eb8ccf12295113ce0de38f80b25f736d62f0a8d87c6b88aca645f168f9c78771"}, + {file = "pydantic-1.8.1-cp36-cp36m-win_amd64.whl", hash = "sha256:20d42f1be7c7acc352b3d09b0cf505a9fab9deb93125061b376fbe1f06a5459f"}, + {file = "pydantic-1.8.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dde4ca368e82791de97c2ec019681ffb437728090c0ff0c3852708cf923e0c7d"}, + {file = "pydantic-1.8.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:3bbd023c981cbe26e6e21c8d2ce78485f85c2e77f7bab5ec15b7d2a1f491918f"}, + {file = "pydantic-1.8.1-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:830ef1a148012b640186bf4d9789a206c56071ff38f2460a32ae67ca21880eb8"}, + {file = "pydantic-1.8.1-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:fb77f7a7e111db1832ae3f8f44203691e15b1fa7e5a1cb9691d4e2659aee41c4"}, + {file = "pydantic-1.8.1-cp37-cp37m-win_amd64.whl", hash = "sha256:3bcb9d7e1f9849a6bdbd027aabb3a06414abd6068cb3b21c49427956cce5038a"}, + {file = "pydantic-1.8.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2287ebff0018eec3cc69b1d09d4b7cebf277726fa1bd96b45806283c1d808683"}, + {file = "pydantic-1.8.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:4bbc47cf7925c86a345d03b07086696ed916c7663cb76aa409edaa54546e53e2"}, + {file = 
"pydantic-1.8.1-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:6388ef4ef1435364c8cc9a8192238aed030595e873d8462447ccef2e17387125"}, + {file = "pydantic-1.8.1-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:dd4888b300769ecec194ca8f2699415f5f7760365ddbe243d4fd6581485fa5f0"}, + {file = "pydantic-1.8.1-cp38-cp38-win_amd64.whl", hash = "sha256:8fbb677e4e89c8ab3d450df7b1d9caed23f254072e8597c33279460eeae59b99"}, + {file = "pydantic-1.8.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2f2736d9a996b976cfdfe52455ad27462308c9d3d0ae21a2aa8b4cd1a78f47b9"}, + {file = "pydantic-1.8.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:3114d74329873af0a0e8004627f5389f3bb27f956b965ddd3e355fe984a1789c"}, + {file = "pydantic-1.8.1-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:258576f2d997ee4573469633592e8b99aa13bda182fcc28e875f866016c8e07e"}, + {file = "pydantic-1.8.1-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:c17a0b35c854049e67c68b48d55e026c84f35593c66d69b278b8b49e2484346f"}, + {file = "pydantic-1.8.1-cp39-cp39-win_amd64.whl", hash = "sha256:e8bc082afef97c5fd3903d05c6f7bb3a6af9fc18631b4cc9fedeb4720efb0c58"}, + {file = "pydantic-1.8.1-py3-none-any.whl", hash = "sha256:e3f8790c47ac42549dc8b045a67b0ca371c7f66e73040d0197ce6172b385e520"}, + {file = "pydantic-1.8.1.tar.gz", hash = "sha256:26cf3cb2e68ec6c0cfcb6293e69fb3450c5fd1ace87f46b64f678b0d29eac4c3"}, ] pydocstyle = [ {file = "pydocstyle-5.1.1-py3-none-any.whl", hash = "sha256:aca749e190a01726a4fb472dd4ef23b5c9da7b9205c0a7857c06533de13fd678"}, {file = "pydocstyle-5.1.1.tar.gz", hash = "sha256:19b86fa8617ed916776a11cd8bc0197e5b9856d5433b777f51a3defe13075325"}, ] pyflakes = [ - {file = "pyflakes-2.2.0-py2.py3-none-any.whl", hash = "sha256:0d94e0e05a19e57a99444b6ddcf9a6eb2e5c68d3ca1e98e90707af8152c90a92"}, - {file = "pyflakes-2.2.0.tar.gz", hash = "sha256:35b2d75ee967ea93b55750aa9edbbf72813e06a66ba54438df2cfac9e3c27fc8"}, + {file = "pyflakes-2.3.1-py2.py3-none-any.whl", hash = "sha256:7893783d01b8a89811dd72d7dfd4d84ff098e5eed95cfa8905b22bbffe52efc3"}, + {file = "pyflakes-2.3.1.tar.gz", hash = "sha256:f5bc8ecabc05bb9d291eb5203d6810b49040f6ff446a756326104746cc00c1db"}, ] pygments = [ - {file = "Pygments-2.7.4-py3-none-any.whl", hash = "sha256:bc9591213a8f0e0ca1a5e68a479b4887fdc3e75d0774e5c71c31920c427de435"}, - {file = "Pygments-2.7.4.tar.gz", hash = "sha256:df49d09b498e83c1a73128295860250b0b7edd4c723a32e9bc0d295c7c2ec337"}, + {file = "Pygments-2.8.1-py3-none-any.whl", hash = "sha256:534ef71d539ae97d4c3a4cf7d6f110f214b0e687e92f9cb9d2a3b0d3101289c8"}, + {file = "Pygments-2.8.1.tar.gz", hash = "sha256:2656e1a6edcdabf4275f9a3640db59fd5de107d88e8663c5d4e9a0fa62f77f94"}, ] pylint = [ - {file = "pylint-2.6.0-py3-none-any.whl", hash = "sha256:bfe68f020f8a0fece830a22dd4d5dddb4ecc6137db04face4c3420a46a52239f"}, - {file = "pylint-2.6.0.tar.gz", hash = "sha256:bb4a908c9dadbc3aac18860550e870f58e1a02c9f2c204fdf5693d73be061210"}, + {file = "pylint-2.7.4-py3-none-any.whl", hash = "sha256:209d712ec870a0182df034ae19f347e725c1e615b2269519ab58a35b3fcbbe7a"}, + {file = "pylint-2.7.4.tar.gz", hash = "sha256:bd38914c7731cdc518634a8d3c5585951302b6e2b6de60fbb3f7a0220e21eeee"}, ] pyparsing = [ {file = "pyparsing-2.4.7-py2.py3-none-any.whl", hash = "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"}, @@ -1077,47 +1141,47 @@ pyyaml = [ {file = "PyYAML-5.4.1.tar.gz", hash = "sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e"}, ] regex = [ - {file = "regex-2020.11.13-cp36-cp36m-macosx_10_9_x86_64.whl", hash = 
"sha256:8b882a78c320478b12ff024e81dc7d43c1462aa4a3341c754ee65d857a521f85"}, - {file = "regex-2020.11.13-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:a63f1a07932c9686d2d416fb295ec2c01ab246e89b4d58e5fa468089cab44b70"}, - {file = "regex-2020.11.13-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:6e4b08c6f8daca7d8f07c8d24e4331ae7953333dbd09c648ed6ebd24db5a10ee"}, - {file = "regex-2020.11.13-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:bba349276b126947b014e50ab3316c027cac1495992f10e5682dc677b3dfa0c5"}, - {file = "regex-2020.11.13-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:56e01daca75eae420bce184edd8bb341c8eebb19dd3bce7266332258f9fb9dd7"}, - {file = "regex-2020.11.13-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:6a8ce43923c518c24a2579fda49f093f1397dad5d18346211e46f134fc624e31"}, - {file = "regex-2020.11.13-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:1ab79fcb02b930de09c76d024d279686ec5d532eb814fd0ed1e0051eb8bd2daa"}, - {file = "regex-2020.11.13-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:9801c4c1d9ae6a70aeb2128e5b4b68c45d4f0af0d1535500884d644fa9b768c6"}, - {file = "regex-2020.11.13-cp36-cp36m-win32.whl", hash = "sha256:49cae022fa13f09be91b2c880e58e14b6da5d10639ed45ca69b85faf039f7a4e"}, - {file = "regex-2020.11.13-cp36-cp36m-win_amd64.whl", hash = "sha256:749078d1eb89484db5f34b4012092ad14b327944ee7f1c4f74d6279a6e4d1884"}, - {file = "regex-2020.11.13-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b2f4007bff007c96a173e24dcda236e5e83bde4358a557f9ccf5e014439eae4b"}, - {file = "regex-2020.11.13-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:38c8fd190db64f513fe4e1baa59fed086ae71fa45083b6936b52d34df8f86a88"}, - {file = "regex-2020.11.13-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:5862975b45d451b6db51c2e654990c1820523a5b07100fc6903e9c86575202a0"}, - {file = "regex-2020.11.13-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:262c6825b309e6485ec2493ffc7e62a13cf13fb2a8b6d212f72bd53ad34118f1"}, - {file = "regex-2020.11.13-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:bafb01b4688833e099d79e7efd23f99172f501a15c44f21ea2118681473fdba0"}, - {file = "regex-2020.11.13-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:e32f5f3d1b1c663af7f9c4c1e72e6ffe9a78c03a31e149259f531e0fed826512"}, - {file = "regex-2020.11.13-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:3bddc701bdd1efa0d5264d2649588cbfda549b2899dc8d50417e47a82e1387ba"}, - {file = "regex-2020.11.13-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:02951b7dacb123d8ea6da44fe45ddd084aa6777d4b2454fa0da61d569c6fa538"}, - {file = "regex-2020.11.13-cp37-cp37m-win32.whl", hash = "sha256:0d08e71e70c0237883d0bef12cad5145b84c3705e9c6a588b2a9c7080e5af2a4"}, - {file = "regex-2020.11.13-cp37-cp37m-win_amd64.whl", hash = "sha256:1fa7ee9c2a0e30405e21031d07d7ba8617bc590d391adfc2b7f1e8b99f46f444"}, - {file = "regex-2020.11.13-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:baf378ba6151f6e272824b86a774326f692bc2ef4cc5ce8d5bc76e38c813a55f"}, - {file = "regex-2020.11.13-cp38-cp38-manylinux1_i686.whl", hash = "sha256:e3faaf10a0d1e8e23a9b51d1900b72e1635c2d5b0e1bea1c18022486a8e2e52d"}, - {file = "regex-2020.11.13-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:2a11a3e90bd9901d70a5b31d7dd85114755a581a5da3fc996abfefa48aee78af"}, - {file = "regex-2020.11.13-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:d1ebb090a426db66dd80df8ca85adc4abfcbad8a7c2e9a5ec7513ede522e0a8f"}, - {file = "regex-2020.11.13-cp38-cp38-manylinux2010_x86_64.whl", hash = 
"sha256:b2b1a5ddae3677d89b686e5c625fc5547c6e492bd755b520de5332773a8af06b"}, - {file = "regex-2020.11.13-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:2c99e97d388cd0a8d30f7c514d67887d8021541b875baf09791a3baad48bb4f8"}, - {file = "regex-2020.11.13-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:c084582d4215593f2f1d28b65d2a2f3aceff8342aa85afd7be23a9cad74a0de5"}, - {file = "regex-2020.11.13-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:a3d748383762e56337c39ab35c6ed4deb88df5326f97a38946ddd19028ecce6b"}, - {file = "regex-2020.11.13-cp38-cp38-win32.whl", hash = "sha256:7913bd25f4ab274ba37bc97ad0e21c31004224ccb02765ad984eef43e04acc6c"}, - {file = "regex-2020.11.13-cp38-cp38-win_amd64.whl", hash = "sha256:6c54ce4b5d61a7129bad5c5dc279e222afd00e721bf92f9ef09e4fae28755683"}, - {file = "regex-2020.11.13-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1862a9d9194fae76a7aaf0150d5f2a8ec1da89e8b55890b1786b8f88a0f619dc"}, - {file = "regex-2020.11.13-cp39-cp39-manylinux1_i686.whl", hash = "sha256:4902e6aa086cbb224241adbc2f06235927d5cdacffb2425c73e6570e8d862364"}, - {file = "regex-2020.11.13-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:7a25fcbeae08f96a754b45bdc050e1fb94b95cab046bf56b016c25e9ab127b3e"}, - {file = "regex-2020.11.13-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:d2d8ce12b7c12c87e41123997ebaf1a5767a5be3ec545f64675388970f415e2e"}, - {file = "regex-2020.11.13-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:f7d29a6fc4760300f86ae329e3b6ca28ea9c20823df123a2ea8693e967b29917"}, - {file = "regex-2020.11.13-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:717881211f46de3ab130b58ec0908267961fadc06e44f974466d1887f865bd5b"}, - {file = "regex-2020.11.13-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:3128e30d83f2e70b0bed9b2a34e92707d0877e460b402faca908c6667092ada9"}, - {file = "regex-2020.11.13-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:8f6a2229e8ad946e36815f2a03386bb8353d4bde368fdf8ca5f0cb97264d3b5c"}, - {file = "regex-2020.11.13-cp39-cp39-win32.whl", hash = "sha256:f8f295db00ef5f8bae530fc39af0b40486ca6068733fb860b42115052206466f"}, - {file = "regex-2020.11.13-cp39-cp39-win_amd64.whl", hash = "sha256:a15f64ae3a027b64496a71ab1f722355e570c3fac5ba2801cafce846bf5af01d"}, - {file = "regex-2020.11.13.tar.gz", hash = "sha256:83d6b356e116ca119db8e7c6fc2983289d87b27b3fac238cfe5dca529d884562"}, + {file = "regex-2021.4.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:619d71c59a78b84d7f18891fe914446d07edd48dc8328c8e149cbe0929b4e000"}, + {file = "regex-2021.4.4-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:47bf5bf60cf04d72bf6055ae5927a0bd9016096bf3d742fa50d9bf9f45aa0711"}, + {file = "regex-2021.4.4-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:281d2fd05555079448537fe108d79eb031b403dac622621c78944c235f3fcf11"}, + {file = "regex-2021.4.4-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:bd28bc2e3a772acbb07787c6308e00d9626ff89e3bfcdebe87fa5afbfdedf968"}, + {file = "regex-2021.4.4-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:7c2a1af393fcc09e898beba5dd59196edaa3116191cc7257f9224beaed3e1aa0"}, + {file = "regex-2021.4.4-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:c38c71df845e2aabb7fb0b920d11a1b5ac8526005e533a8920aea97efb8ec6a4"}, + {file = "regex-2021.4.4-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:96fcd1888ab4d03adfc9303a7b3c0bd78c5412b2bfbe76db5b56d9eae004907a"}, + {file = "regex-2021.4.4-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:ade17eb5d643b7fead300a1641e9f45401c98eee23763e9ed66a43f92f20b4a7"}, + {file = 
"regex-2021.4.4-cp36-cp36m-win32.whl", hash = "sha256:e8e5b509d5c2ff12f8418006d5a90e9436766133b564db0abaec92fd27fcee29"}, + {file = "regex-2021.4.4-cp36-cp36m-win_amd64.whl", hash = "sha256:11d773d75fa650cd36f68d7ca936e3c7afaae41b863b8c387a22aaa78d3c5c79"}, + {file = "regex-2021.4.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d3029c340cfbb3ac0a71798100ccc13b97dddf373a4ae56b6a72cf70dfd53bc8"}, + {file = "regex-2021.4.4-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:18c071c3eb09c30a264879f0d310d37fe5d3a3111662438889ae2eb6fc570c31"}, + {file = "regex-2021.4.4-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:4c557a7b470908b1712fe27fb1ef20772b78079808c87d20a90d051660b1d69a"}, + {file = "regex-2021.4.4-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:01afaf2ec48e196ba91b37451aa353cb7eda77efe518e481707e0515025f0cd5"}, + {file = "regex-2021.4.4-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:3a9cd17e6e5c7eb328517969e0cb0c3d31fd329298dd0c04af99ebf42e904f82"}, + {file = "regex-2021.4.4-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:90f11ff637fe8798933fb29f5ae1148c978cccb0452005bf4c69e13db951e765"}, + {file = "regex-2021.4.4-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:919859aa909429fb5aa9cf8807f6045592c85ef56fdd30a9a3747e513db2536e"}, + {file = "regex-2021.4.4-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:339456e7d8c06dd36a22e451d58ef72cef293112b559010db3d054d5560ef439"}, + {file = "regex-2021.4.4-cp37-cp37m-win32.whl", hash = "sha256:67bdb9702427ceddc6ef3dc382455e90f785af4c13d495f9626861763ee13f9d"}, + {file = "regex-2021.4.4-cp37-cp37m-win_amd64.whl", hash = "sha256:32e65442138b7b76dd8173ffa2cf67356b7bc1768851dded39a7a13bf9223da3"}, + {file = "regex-2021.4.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1e1c20e29358165242928c2de1482fb2cf4ea54a6a6dea2bd7a0e0d8ee321500"}, + {file = "regex-2021.4.4-cp38-cp38-manylinux1_i686.whl", hash = "sha256:314d66636c494ed9c148a42731b3834496cc9a2c4251b1661e40936814542b14"}, + {file = "regex-2021.4.4-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:6d1b01031dedf2503631d0903cb563743f397ccaf6607a5e3b19a3d76fc10480"}, + {file = "regex-2021.4.4-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:741a9647fcf2e45f3a1cf0e24f5e17febf3efe8d4ba1281dcc3aa0459ef424dc"}, + {file = "regex-2021.4.4-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:4c46e22a0933dd783467cf32b3516299fb98cfebd895817d685130cc50cd1093"}, + {file = "regex-2021.4.4-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:e512d8ef5ad7b898cdb2d8ee1cb09a8339e4f8be706d27eaa180c2f177248a10"}, + {file = "regex-2021.4.4-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:980d7be47c84979d9136328d882f67ec5e50008681d94ecc8afa8a65ed1f4a6f"}, + {file = "regex-2021.4.4-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:ce15b6d103daff8e9fee13cf7f0add05245a05d866e73926c358e871221eae87"}, + {file = "regex-2021.4.4-cp38-cp38-win32.whl", hash = "sha256:a91aa8619b23b79bcbeb37abe286f2f408d2f2d6f29a17237afda55bb54e7aac"}, + {file = "regex-2021.4.4-cp38-cp38-win_amd64.whl", hash = "sha256:c0502c0fadef0d23b128605d69b58edb2c681c25d44574fc673b0e52dce71ee2"}, + {file = "regex-2021.4.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:598585c9f0af8374c28edd609eb291b5726d7cbce16be6a8b95aa074d252ee17"}, + {file = "regex-2021.4.4-cp39-cp39-manylinux1_i686.whl", hash = "sha256:ee54ff27bf0afaf4c3b3a62bcd016c12c3fdb4ec4f413391a90bd38bc3624605"}, + {file = "regex-2021.4.4-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:7d9884d86dd4dd489e981d94a65cd30d6f07203d90e98f6f657f05170f6324c9"}, + {file = 
"regex-2021.4.4-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:bf5824bfac591ddb2c1f0a5f4ab72da28994548c708d2191e3b87dd207eb3ad7"}, + {file = "regex-2021.4.4-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:563085e55b0d4fb8f746f6a335893bda5c2cef43b2f0258fe1020ab1dd874df8"}, + {file = "regex-2021.4.4-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:b9c3db21af35e3b3c05764461b262d6f05bbca08a71a7849fd79d47ba7bc33ed"}, + {file = "regex-2021.4.4-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:3916d08be28a1149fb97f7728fca1f7c15d309a9f9682d89d79db75d5e52091c"}, + {file = "regex-2021.4.4-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:fd45ff9293d9274c5008a2054ecef86a9bfe819a67c7be1afb65e69b405b3042"}, + {file = "regex-2021.4.4-cp39-cp39-win32.whl", hash = "sha256:fa4537fb4a98fe8fde99626e4681cc644bdcf2a795038533f9f711513a862ae6"}, + {file = "regex-2021.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:97f29f57d5b84e73fbaf99ab3e26134e6687348e95ef6b48cfd2c06807005a07"}, + {file = "regex-2021.4.4.tar.gz", hash = "sha256:52ba3d3f9b942c49d7e4bc105bb28551c44065f139a65062ab7912bef10c9afb"}, ] requests = [ {file = "requests-2.25.1-py2.py3-none-any.whl", hash = "sha256:c210084e36a42ae6b9219e00e48287def368a26d03a048ddad7bfee44f75871e"}, @@ -1127,13 +1191,21 @@ requests-mock = [ {file = "requests-mock-1.8.0.tar.gz", hash = "sha256:e68f46844e4cee9d447150343c9ae875f99fa8037c6dcf5f15bf1fe9ab43d226"}, {file = "requests_mock-1.8.0-py2.py3-none-any.whl", hash = "sha256:11215c6f4df72702aa357f205cf1e537cffd7392b3e787b58239bde5fb3db53b"}, ] +rfc3339-validator = [ + {file = "rfc3339_validator-0.1.3-py2.py3-none-any.whl", hash = "sha256:bf86bab55fd90bf5fa42c84d63066053345e675efcd351d1266d6d20b46ca86e"}, + {file = "rfc3339_validator-0.1.3.tar.gz", hash = "sha256:7a578aa0740e9ee2b48356fe1f347139190c4c72e27f303b3617054efd15df32"}, +] +rfc3987 = [ + {file = "rfc3987-1.3.8-py2.py3-none-any.whl", hash = "sha256:10702b1e51e5658843460b189b185c0366d2cf4cff716f13111b0ea9fd2dce53"}, + {file = "rfc3987-1.3.8.tar.gz", hash = "sha256:d3c4d257a560d544e9826b38bc81db676890c79ab9d7ac92b39c7a253d5ca733"}, +] rich = [ - {file = "rich-9.8.2-py3-none-any.whl", hash = "sha256:d7732d12dfa91a2c06f89fa2b630a068ba12d39ad22a2078b477ef1948b38f3f"}, - {file = "rich-9.8.2.tar.gz", hash = "sha256:c0d5903b463f015b254d6f52da82af3821d266fe516ae05fdc266e6abba5c3a8"}, + {file = "rich-9.13.0-py3-none-any.whl", hash = "sha256:9004f6449c89abadf689dad6f92393e760b8c3a8a8c4ea6d8d474066307c0e66"}, + {file = "rich-9.13.0.tar.gz", hash = "sha256:d59e94a0e3e686f0d268fe5c7060baa1bd6744abca71b45351f5850a3aaa6764"}, ] "ruamel.yaml" = [ - {file = "ruamel.yaml-0.16.12-py2.py3-none-any.whl", hash = "sha256:012b9470a0ea06e4e44e99e7920277edf6b46eee0232a04487ea73a7386340a5"}, - {file = "ruamel.yaml-0.16.12.tar.gz", hash = "sha256:076cc0bc34f1966d920a49f18b52b6ad559fbe656a0748e3535cf7b3f29ebf9e"}, + {file = "ruamel.yaml-0.16.13-py2.py3-none-any.whl", hash = "sha256:64b06e7873eb8e1125525ecef7345447d786368cadca92a7cd9b59eae62e95a3"}, + {file = "ruamel.yaml-0.16.13.tar.gz", hash = "sha256:bb48c514222702878759a05af96f4b7ecdba9b33cd4efcf25c86b882cef3a942"}, ] "ruamel.yaml.clib" = [ {file = "ruamel.yaml.clib-0.2.2-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:28116f204103cb3a108dfd37668f20abe6e3cafd0d3fd40dba126c732457b3cc"}, @@ -1166,8 +1238,8 @@ six = [ {file = "six-1.15.0.tar.gz", hash = "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259"}, ] smmap = [ - {file = "smmap-3.0.4-py2.py3-none-any.whl", hash = 
"sha256:54c44c197c819d5ef1991799a7e30b662d1e520f2ac75c9efbeb54a742214cf4"}, - {file = "smmap-3.0.4.tar.gz", hash = "sha256:9c98bbd1f9786d22f14b3d4126894d56befb835ec90cef151af566c7e19b5d24"}, + {file = "smmap-4.0.0-py2.py3-none-any.whl", hash = "sha256:a9a7479e4c572e2e775c404dcd3080c8dc49f39918c2cf74913d30c4c478e3c2"}, + {file = "smmap-4.0.0.tar.gz", hash = "sha256:7e65386bd122d45405ddf795637b7f7d2b532e7e401d46bbe3fb49b9986d5182"}, ] snowballstemmer = [ {file = "snowballstemmer-2.1.0-py2.py3-none-any.whl", hash = "sha256:b51b447bea85f9968c13b650126a888aabd4cb4463fca868ec596826325dedc2"}, @@ -1177,6 +1249,9 @@ stevedore = [ {file = "stevedore-3.3.0-py3-none-any.whl", hash = "sha256:50d7b78fbaf0d04cd62411188fa7eedcb03eb7f4c4b37005615ceebe582aa82a"}, {file = "stevedore-3.3.0.tar.gz", hash = "sha256:3a5bbd0652bf552748871eaa73a4a8dc2899786bc497a2aa1fcb4dcdb0debeee"}, ] +strict-rfc3339 = [ + {file = "strict-rfc3339-0.7.tar.gz", hash = "sha256:5cad17bedfc3af57b399db0fed32771f18fc54bbd917e85546088607ac5e1277"}, +] termcolor = [ {file = "termcolor-1.1.0.tar.gz", hash = "sha256:1d6d69ce66211143803fbc56652b41d73b4a400a2891d7bf7a1cdf4c02de613b"}, ] @@ -1185,36 +1260,36 @@ toml = [ {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, ] typed-ast = [ - {file = "typed_ast-1.4.2-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:7703620125e4fb79b64aa52427ec192822e9f45d37d4b6625ab37ef403e1df70"}, - {file = "typed_ast-1.4.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:c9aadc4924d4b5799112837b226160428524a9a45f830e0d0f184b19e4090487"}, - {file = "typed_ast-1.4.2-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:9ec45db0c766f196ae629e509f059ff05fc3148f9ffd28f3cfe75d4afb485412"}, - {file = "typed_ast-1.4.2-cp35-cp35m-win32.whl", hash = "sha256:85f95aa97a35bdb2f2f7d10ec5bbdac0aeb9dafdaf88e17492da0504de2e6400"}, - {file = "typed_ast-1.4.2-cp35-cp35m-win_amd64.whl", hash = "sha256:9044ef2df88d7f33692ae3f18d3be63dec69c4fb1b5a4a9ac950f9b4ba571606"}, - {file = "typed_ast-1.4.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:c1c876fd795b36126f773db9cbb393f19808edd2637e00fd6caba0e25f2c7b64"}, - {file = "typed_ast-1.4.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:5dcfc2e264bd8a1db8b11a892bd1647154ce03eeba94b461effe68790d8b8e07"}, - {file = "typed_ast-1.4.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:8db0e856712f79c45956da0c9a40ca4246abc3485ae0d7ecc86a20f5e4c09abc"}, - {file = "typed_ast-1.4.2-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:d003156bb6a59cda9050e983441b7fa2487f7800d76bdc065566b7d728b4581a"}, - {file = "typed_ast-1.4.2-cp36-cp36m-win32.whl", hash = "sha256:4c790331247081ea7c632a76d5b2a265e6d325ecd3179d06e9cf8d46d90dd151"}, - {file = "typed_ast-1.4.2-cp36-cp36m-win_amd64.whl", hash = "sha256:d175297e9533d8d37437abc14e8a83cbc68af93cc9c1c59c2c292ec59a0697a3"}, - {file = "typed_ast-1.4.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:cf54cfa843f297991b7388c281cb3855d911137223c6b6d2dd82a47ae5125a41"}, - {file = "typed_ast-1.4.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:b4fcdcfa302538f70929eb7b392f536a237cbe2ed9cba88e3bf5027b39f5f77f"}, - {file = "typed_ast-1.4.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:987f15737aba2ab5f3928c617ccf1ce412e2e321c77ab16ca5a293e7bbffd581"}, - {file = "typed_ast-1.4.2-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:37f48d46d733d57cc70fd5f30572d11ab8ed92da6e6b28e024e4a3edfb456e37"}, - {file = "typed_ast-1.4.2-cp37-cp37m-win32.whl", hash = 
"sha256:36d829b31ab67d6fcb30e185ec996e1f72b892255a745d3a82138c97d21ed1cd"}, - {file = "typed_ast-1.4.2-cp37-cp37m-win_amd64.whl", hash = "sha256:8368f83e93c7156ccd40e49a783a6a6850ca25b556c0fa0240ed0f659d2fe496"}, - {file = "typed_ast-1.4.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:963c80b583b0661918718b095e02303d8078950b26cc00b5e5ea9ababe0de1fc"}, - {file = "typed_ast-1.4.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:e683e409e5c45d5c9082dc1daf13f6374300806240719f95dc783d1fc942af10"}, - {file = "typed_ast-1.4.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:84aa6223d71012c68d577c83f4e7db50d11d6b1399a9c779046d75e24bed74ea"}, - {file = "typed_ast-1.4.2-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:a38878a223bdd37c9709d07cd357bb79f4c760b29210e14ad0fb395294583787"}, - {file = "typed_ast-1.4.2-cp38-cp38-win32.whl", hash = "sha256:a2c927c49f2029291fbabd673d51a2180038f8cd5a5b2f290f78c4516be48be2"}, - {file = "typed_ast-1.4.2-cp38-cp38-win_amd64.whl", hash = "sha256:c0c74e5579af4b977c8b932f40a5464764b2f86681327410aa028a22d2f54937"}, - {file = "typed_ast-1.4.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:07d49388d5bf7e863f7fa2f124b1b1d89d8aa0e2f7812faff0a5658c01c59aa1"}, - {file = "typed_ast-1.4.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:240296b27397e4e37874abb1df2a608a92df85cf3e2a04d0d4d61055c8305ba6"}, - {file = "typed_ast-1.4.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:d746a437cdbca200622385305aedd9aef68e8a645e385cc483bdc5e488f07166"}, - {file = "typed_ast-1.4.2-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:14bf1522cdee369e8f5581238edac09150c765ec1cb33615855889cf33dcb92d"}, - {file = "typed_ast-1.4.2-cp39-cp39-win32.whl", hash = "sha256:cc7b98bf58167b7f2db91a4327da24fb93368838eb84a44c472283778fc2446b"}, - {file = "typed_ast-1.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:7147e2a76c75f0f64c4319886e7639e490fee87c9d25cb1d4faef1d8cf83a440"}, - {file = "typed_ast-1.4.2.tar.gz", hash = "sha256:9fc0b3cb5d1720e7141d103cf4819aea239f7d136acf9ee4a69b047b7986175a"}, + {file = "typed_ast-1.4.3-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:2068531575a125b87a41802130fa7e29f26c09a2833fea68d9a40cf33902eba6"}, + {file = "typed_ast-1.4.3-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:c907f561b1e83e93fad565bac5ba9c22d96a54e7ea0267c708bffe863cbe4075"}, + {file = "typed_ast-1.4.3-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:1b3ead4a96c9101bef08f9f7d1217c096f31667617b58de957f690c92378b528"}, + {file = "typed_ast-1.4.3-cp35-cp35m-win32.whl", hash = "sha256:dde816ca9dac1d9c01dd504ea5967821606f02e510438120091b84e852367428"}, + {file = "typed_ast-1.4.3-cp35-cp35m-win_amd64.whl", hash = "sha256:777a26c84bea6cd934422ac2e3b78863a37017618b6e5c08f92ef69853e765d3"}, + {file = "typed_ast-1.4.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f8afcf15cc511ada719a88e013cec87c11aff7b91f019295eb4530f96fe5ef2f"}, + {file = "typed_ast-1.4.3-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:52b1eb8c83f178ab787f3a4283f68258525f8d70f778a2f6dd54d3b5e5fb4341"}, + {file = "typed_ast-1.4.3-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:01ae5f73431d21eead5015997ab41afa53aa1fbe252f9da060be5dad2c730ace"}, + {file = "typed_ast-1.4.3-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:c190f0899e9f9f8b6b7863debfb739abcb21a5c054f911ca3596d12b8a4c4c7f"}, + {file = "typed_ast-1.4.3-cp36-cp36m-win32.whl", hash = "sha256:398e44cd480f4d2b7ee8d98385ca104e35c81525dd98c519acff1b79bdaac363"}, + {file = "typed_ast-1.4.3-cp36-cp36m-win_amd64.whl", hash = 
"sha256:bff6ad71c81b3bba8fa35f0f1921fb24ff4476235a6e94a26ada2e54370e6da7"}, + {file = "typed_ast-1.4.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0fb71b8c643187d7492c1f8352f2c15b4c4af3f6338f21681d3681b3dc31a266"}, + {file = "typed_ast-1.4.3-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:760ad187b1041a154f0e4d0f6aae3e40fdb51d6de16e5c99aedadd9246450e9e"}, + {file = "typed_ast-1.4.3-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:5feca99c17af94057417d744607b82dd0a664fd5e4ca98061480fd8b14b18d04"}, + {file = "typed_ast-1.4.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:95431a26309a21874005845c21118c83991c63ea800dd44843e42a916aec5899"}, + {file = "typed_ast-1.4.3-cp37-cp37m-win32.whl", hash = "sha256:aee0c1256be6c07bd3e1263ff920c325b59849dc95392a05f258bb9b259cf39c"}, + {file = "typed_ast-1.4.3-cp37-cp37m-win_amd64.whl", hash = "sha256:9ad2c92ec681e02baf81fdfa056fe0d818645efa9af1f1cd5fd6f1bd2bdfd805"}, + {file = "typed_ast-1.4.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b36b4f3920103a25e1d5d024d155c504080959582b928e91cb608a65c3a49e1a"}, + {file = "typed_ast-1.4.3-cp38-cp38-manylinux1_i686.whl", hash = "sha256:067a74454df670dcaa4e59349a2e5c81e567d8d65458d480a5b3dfecec08c5ff"}, + {file = "typed_ast-1.4.3-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7538e495704e2ccda9b234b82423a4038f324f3a10c43bc088a1636180f11a41"}, + {file = "typed_ast-1.4.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:af3d4a73793725138d6b334d9d247ce7e5f084d96284ed23f22ee626a7b88e39"}, + {file = "typed_ast-1.4.3-cp38-cp38-win32.whl", hash = "sha256:f2362f3cb0f3172c42938946dbc5b7843c2a28aec307c49100c8b38764eb6927"}, + {file = "typed_ast-1.4.3-cp38-cp38-win_amd64.whl", hash = "sha256:dd4a21253f42b8d2b48410cb31fe501d32f8b9fbeb1f55063ad102fe9c425e40"}, + {file = "typed_ast-1.4.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f328adcfebed9f11301eaedfa48e15bdece9b519fb27e6a8c01aa52a17ec31b3"}, + {file = "typed_ast-1.4.3-cp39-cp39-manylinux1_i686.whl", hash = "sha256:2c726c276d09fc5c414693a2de063f521052d9ea7c240ce553316f70656c84d4"}, + {file = "typed_ast-1.4.3-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:cae53c389825d3b46fb37538441f75d6aecc4174f615d048321b716df2757fb0"}, + {file = "typed_ast-1.4.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:b9574c6f03f685070d859e75c7f9eeca02d6933273b5e69572e5ff9d5e3931c3"}, + {file = "typed_ast-1.4.3-cp39-cp39-win32.whl", hash = "sha256:209596a4ec71d990d71d5e0d312ac935d86930e6eecff6ccc7007fe54d703808"}, + {file = "typed_ast-1.4.3-cp39-cp39-win_amd64.whl", hash = "sha256:9c6d1a54552b5330bc657b7ef0eae25d00ba7ffe85d9ea8ae6540d2197a3788c"}, + {file = "typed_ast-1.4.3.tar.gz", hash = "sha256:fb1bbeac803adea29cedd70781399c99138358c26d05fcbd23c13016b7f5ec65"}, ] typing-extensions = [ {file = "typing_extensions-3.7.4.3-py2-none-any.whl", hash = "sha256:dafc7639cde7f1b6e1acc0f457842a83e722ccca8eef5270af2d74792619a89f"}, @@ -1222,8 +1297,8 @@ typing-extensions = [ {file = "typing_extensions-3.7.4.3.tar.gz", hash = "sha256:99d4073b617d30288f569d3f13d2bd7548c3a7e4c8de87db09a9d29bb3a4a60c"}, ] urllib3 = [ - {file = "urllib3-1.26.2-py2.py3-none-any.whl", hash = "sha256:d8ff90d979214d7b4f8ce956e80f4028fc6860e4431f731ea4a8c08f23f99473"}, - {file = "urllib3-1.26.2.tar.gz", hash = "sha256:19188f96923873c92ccb987120ec4acaa12f0461fa9ce5d3d0772bc965a39e08"}, + {file = "urllib3-1.26.4-py2.py3-none-any.whl", hash = "sha256:2f4da4594db7e1e110a944bb1b551fdf4e6c136ad42e4234131391e21eb5b0df"}, + {file = "urllib3-1.26.4.tar.gz", hash = 
"sha256:e7b021f7241115872f92f43c6508082facffbd1c048e3c6e2bb9c2a157e28937"}, ] wcwidth = [ {file = "wcwidth-0.2.5-py2.py3-none-any.whl", hash = "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784"}, @@ -1233,10 +1308,9 @@ wrapt = [ {file = "wrapt-1.12.1.tar.gz", hash = "sha256:b62ffa81fb85f4332a4f609cab4ac40709470da05643a082ec1eb88e6d9b97d7"}, ] yamllint = [ - {file = "yamllint-1.25.0-py2.py3-none-any.whl", hash = "sha256:c7be4d0d2584a1b561498fa9acb77ad22eb434a109725c7781373ae496d823b3"}, - {file = "yamllint-1.25.0.tar.gz", hash = "sha256:b1549cbe5b47b6ba67bdeea31720f5c51431a4d0c076c1557952d841f7223519"}, + {file = "yamllint-1.26.1.tar.gz", hash = "sha256:87d9462b3ed7e9dfa19caa177f7a77cd9888b3dc4044447d6ae0ab233bcd1324"}, ] zipp = [ - {file = "zipp-3.4.0-py3-none-any.whl", hash = "sha256:102c24ef8f171fd729d46599845e95c7ab894a4cf45f5de11a44cc7444fb1108"}, - {file = "zipp-3.4.0.tar.gz", hash = "sha256:ed5eee1974372595f9e416cc7bbeeb12335201d8081ca8a0743c954d4446e5cb"}, + {file = "zipp-3.4.1-py3-none-any.whl", hash = "sha256:51cb66cc54621609dd593d1787f286ee42a5c0adbb4b29abea5a63edc3e03098"}, + {file = "zipp-3.4.1.tar.gz", hash = "sha256:3607921face881ba3e026887d8150cca609d517579abe052ac81fc5aeffdbd76"}, ] diff --git a/pyproject.toml b/pyproject.toml index d48b7cc..c1ce9cc 100755 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "schema-enforcer" -version = "1.0.1" +version = "1.1.0" description = "Tool/Framework for testing structured data against schema definitions" authors = ["Network to Code, LLC "] license = "Apache-2.0" @@ -15,28 +15,38 @@ include = [ [tool.poetry.dependencies] python = "^3.7" -click = "^7.1.2" -termcolor = "^1.1.0" -jsonschema = "^3.2.0" -toml = "^0.10.0" -"ruamel.yaml" = "^0.16.10" -jinja2 = "^2.11.2" +click = "^7.1" +termcolor = "^1.1" +jsonschema = "^3.2" +toml = "^0.10" +"ruamel.yaml" = "^0.16" +jinja2 = "^2.11" jsonref = "^0.2" -pydantic = "^1.6.1" -rich = "^9.5.1" -ansible = "^2.8.0" +pydantic = "^1.6" +rich = "^9.5" +rfc3987 = "^1.3" +jsonpointer = "^2.1" +strict-rfc3339 = "^0.7" +rfc3339-validator = "^0.1" +jmespath = "^0.10" +ansible = { version = "^2.10.0", optional = true } +ansible-base = { version = "^2.10.0", optional = true } + +[tool.poetry.extras] +ansible = ["ansible"] +ansible-base = ["ansible-base"] [tool.poetry.dev-dependencies] -pytest = "^5.4.1" -requests_mock = "^1.7.0" +pytest = "^5.4" +requests_mock = "^1.7" pyyaml = "^5.3" black = "^19.10b0" -pylint = "^2.4.4" -pydocstyle = "^5.0.2" -yamllint = "^1.20.0" -bandit = "^1.6.2" -invoke = "^1.4.1" -flake8 = "^3.8.3" +pylint = "^2.4" +pydocstyle = "^5.0" +yamllint = "^1.20" +bandit = "^1.6" +invoke = "^1.4" +flake8 = "^3.8" coverage = "^5.3" [tool.poetry.scripts] @@ -75,6 +85,9 @@ notes = """, XXX, """ +[tool.pylint.SIMILARITIES] +min-similarity-lines = 15 + [tool.pytest.ini_options] testpaths = [ "tests" diff --git a/schema_enforcer/cli.py b/schema_enforcer/cli.py index 2ebbbe2..e76cb8e 100644 --- a/schema_enforcer/cli.py +++ b/schema_enforcer/cli.py @@ -9,6 +9,7 @@ from schema_enforcer.schemas.manager import SchemaManager from schema_enforcer.instances.file import InstanceFileManager from schema_enforcer.utils import error +from schema_enforcer.exceptions import InvalidJSONSchema @click.group() @@ -53,7 +54,11 @@ def validate(show_pass, show_checks, strict): # noqa D205 # --------------------------------------------------------------------- # Load Schema(s) from disk # --------------------------------------------------------------------- - smgr = 
SchemaManager(config=config.SETTINGS) + try: + smgr = SchemaManager(config=config.SETTINGS) + except InvalidJSONSchema as exc: + error(str(exc)) + sys.exit(1) if not smgr.schemas: error("No schemas were loaded") @@ -68,6 +73,9 @@ def validate(show_pass, show_checks, strict): # noqa D205 error("No instance files were found to validate") sys.exit(1) + if config.SETTINGS.data_file_automap: + ifm.add_matches_by_property_automap(smgr) + if show_checks: ifm.print_schema_mapping() sys.exit(0) @@ -154,7 +162,11 @@ def schema(check, generate_invalid, list_schemas, schema_id, dump_schemas): # n # --------------------------------------------------------------------- # Load Schema(s) from disk # --------------------------------------------------------------------- - smgr = SchemaManager(config=config.SETTINGS) + try: + smgr = SchemaManager(config=config.SETTINGS) + except InvalidJSONSchema as exc: + error(str(exc)) + sys.exit(1) if not smgr.schemas: error("No schemas were loaded") @@ -192,7 +204,7 @@ def schema(check, generate_invalid, list_schemas, schema_id, dump_schemas): # n ) def ansible( inventory, limit, show_pass, show_checks -): # pylint: disable=too-many-branches,too-many-locals,too-many-locals # noqa: D417,D301 +): # pylint: disable=too-many-branches,too-many-locals,too-many-locals,too-many-statements # noqa: D417,D301 """Validate the hostvars for all hosts within an Ansible inventory. The hostvars are dynamically rendered based on groups to which each host belongs. @@ -249,7 +261,11 @@ def ansible( # --------------------------------------------------------------------- # Load Schema(s) from disk # --------------------------------------------------------------------- - smgr = SchemaManager(config=config.SETTINGS) + try: + smgr = SchemaManager(config=config.SETTINGS) + except InvalidJSONSchema as exc: + error(str(exc)) + sys.exit(1) if not smgr.schemas: error("No schemas were loaded") @@ -288,7 +304,6 @@ def ansible( # Acquire schemas applicable to the given host applicable_schemas = inv.get_applicable_schemas(hostvars, smgr, declared_schema_ids, automap) - # import pdb; pdb.set_trace() for schema_obj in applicable_schemas.values(): # Combine host attributes into a single data structure matching to properties defined at the top level of the schema definition if not strict: @@ -303,8 +318,9 @@ def ansible( data = hostvars # Validate host vars against schema - for result in schema_obj.validate(data=data, strict=strict): + schema_obj.validate(data=data, strict=strict) + for result in schema_obj.get_results(): result.instance_type = "HOST" result.instance_hostname = host.name @@ -314,6 +330,7 @@ def ansible( elif result.passed() and show_pass: result.print() + schema_obj.clear_results() if not error_exists: print(colored("ALL SCHEMA VALIDATION CHECKS PASSED", "green")) diff --git a/schema_enforcer/config.py b/schema_enforcer/config.py index 8909241..1fd32f3 100644 --- a/schema_enforcer/config.py +++ b/schema_enforcer/config.py @@ -27,6 +27,7 @@ class Settings(BaseSettings): # pylint: disable=too-few-public-methods main_directory: str = "schema" definition_directory: str = "definitions" schema_directory: str = "schemas" + validator_directory: str = "validators" test_directory: str = "tests" # Settings specific to the schema files @@ -37,6 +38,7 @@ class Settings(BaseSettings): # pylint: disable=too-few-public-methods data_file_search_directories: List[str] = ["./"] data_file_extensions: List[str] = [".json", ".yaml", ".yml"] data_file_exclude_filenames: List[str] = [".yamllint.yml", 
".travis.yml"] + data_file_automap: bool = True ansible_inventory: Optional[str] schema_mapping: Dict = dict() diff --git a/schema_enforcer/exceptions.py b/schema_enforcer/exceptions.py index 1665c72..3fe75ae 100644 --- a/schema_enforcer/exceptions.py +++ b/schema_enforcer/exceptions.py @@ -18,3 +18,21 @@ class SchemaNotDefined(Exception): Args (Exception): Base Exception Object """ + + +class InvalidJSONSchema(Exception): + """Raised when a JSONschema file is invalid. + + Args (Exception): Base Exception Object + """ + + def __init__(self, schema): + """Provide instance variables when invalid schema is detected.""" + super().__init__(schema) + self.schema = schema + + def __str__(self): + """Generate error string including validation errors.""" + errors = [result.message for result in self.schema.check_if_valid() if not result.passed()] + message = f"Invalid JSONschema file: {self.schema.filename} - {errors}" + return message diff --git a/schema_enforcer/instances/file.py b/schema_enforcer/instances/file.py index 83e7185..bd7024a 100644 --- a/schema_enforcer/instances/file.py +++ b/schema_enforcer/instances/file.py @@ -17,14 +17,14 @@ def __init__(self, config): The file manager will locate all potential instance files in the search directories. Args: - config (string): The pydantec config object. + config (pydantic.BaseSettings): The Pydantec settings object. """ self.instances = [] self.config = config # Find all instance files # TODO need to load file extensions from the config - files = find_files( + instance_files = find_files( file_extensions=config.data_file_extensions, search_directories=config.data_file_search_directories, excluded_filenames=config.data_file_exclude_filenames, @@ -34,14 +34,23 @@ def __init__(self, config): # For each instance file, check if there is a static mapping defined in the config # Create the InstanceFile object and save it - for root, filename in files: - matches = [] + for root, filename in instance_files: + matches = set() if filename in config.schema_mapping: - matches.extend(config.schema_mapping[filename]) + matches.update(config.schema_mapping[filename]) instance = InstanceFile(root=root, filename=filename, matches=matches) self.instances.append(instance) + def add_matches_by_property_automap(self, schema_manager): + """Adds schema_ids to matches by automapping top level schema properties to top level keys in instance data. + + Args: + schema_manager (schema_enforcer.schemas.manager.SchemaManager): Schema manager oject + """ + for instance in self.instances: + instance.add_matches_by_property_automap(schema_manager) + def print_schema_mapping(self): """Print in CLI the matches for all instance files.""" print("{:50} Schema ID".format("Structured Data File")) @@ -49,7 +58,7 @@ def print_schema_mapping(self): print_strings = [] for instance in self.instances: filepath = f"{instance.path}/{instance.filename}" - print_strings.append(f"{filepath:50} {instance.matches}") + print_strings.append(f"{filepath:50} {sorted(instance.matches)}") print("\n".join(sorted(print_strings))) @@ -62,53 +71,91 @@ def __init__(self, root, filename, matches=None): Args: root (string): Absolute path to the directory where the schema file is located. filename (string): Name of the file. - matches (list, optional): List of schema IDs that matches with this Instance file. Defaults to None. + matches (set, optional): Set of schema IDs that matches with this Instance file. Defaults to None. 
""" self.data = None self.path = root self.full_path = os.path.realpath(root) self.filename = filename + # Internal vars for caching data + self._top_level_properties = set() + if matches: self.matches = matches else: - self.matches = [] + self.matches = set() + + self._add_matches_by_decorator() + + @property + def top_level_properties(self): + """Return a list of top level properties in the structured data defined by the data pulled from _get_content. + + Returns: + set: Set of the strings of top level properties defined by the data file + """ + if not self._top_level_properties: + content = self._get_content() + self._top_level_properties = set(content.keys()) - self.matches.extend(self._find_matches_inline()) + return self._top_level_properties - def _find_matches_inline(self, content=None): - """Find addition matches using the Schema ID decorator comment. + def _add_matches_by_decorator(self, content=None): + """Add matches which declare schema IDs they should adhere to using a decorator comment. - Look for a line with # jsonschema: schema_id,schema_id + If a line of the form # jsonschema: , is defined in the data file, the + schema IDs will be added to the list of schema IDs the data will be checked for adherence to. Args: content (string, optional): Content of the file to analyze. Default to None. Returns: - list(string): List of matches found in the file. + set(string): Set of matches (strings of schema_ids) found in the file. """ if not content: - content = Path(os.path.join(self.full_path, self.filename)).read_text() + content = self._get_content(structured=False) - matches = [] + matches = set() if SCHEMA_TAG in content: line_regexp = r"^#.*{0}:\s*(.*)$".format(SCHEMA_TAG) match = re.match(line_regexp, content, re.MULTILINE) if match: - matches = [x.strip() for x in match.group(1).split(",")] + matches = {x.strip() for x in match.group(1).split(",")} - return matches + self.matches.update(matches) - def get_content(self): - """Return the content of the instance file in structured format. + def _get_content(self, structured=True): + """Returns the content of the instance file. - Content returned can be either dict or list depending on the content of the file + Args: + structured (bool): Return structured data if true. If false returns the string representation of the data + stored in the instance file. Defaults to True. Returns: - dict or list: Content of the instance file. + dict, list, or str: File Contents. Dict or list if structured is set to True. Otherwise returns a string. + """ + file_location = os.path.join(self.full_path, self.filename) + + if not structured: + return Path(file_location).read_text() + + return load_file(file_location) + + def add_matches_by_property_automap(self, schema_manager): + """Adds schema_ids to self.matches by automapping top level schema properties to top level keys in instance data. + + Args: + schema_manager (schema_enforcer.schemas.manager.SchemaManager): Schema manager oject """ - return load_file(os.path.join(self.full_path, self.filename)) + matches = set() + + for schema_id, schema_obj in schema_manager.iter_schemas(): + if schema_obj.top_level_properties.intersection(self.top_level_properties): + matches.add(schema_id) + + self.matches.update(matches) def validate(self, schema_manager, strict=False): """Validate this instance file with all matching schema in the schema manager. 
@@ -128,6 +175,9 @@ def validate(self, schema_manager, strict=False): for schema_id, schema in schema_manager.iter_schemas(): if schema_id not in self.matches: continue - errs = itertools.chain(errs, schema.validate(self.get_content(), strict)) + schema.validate(self._get_content(), strict) + results = schema.get_results() + errs = itertools.chain(errs, results) + schema.clear_results() return errs diff --git a/schema_enforcer/schemas/jsonschema.py b/schema_enforcer/schemas/jsonschema.py index 48e94cd..7b3c7bd 100644 --- a/schema_enforcer/schemas/jsonschema.py +++ b/schema_enforcer/schemas/jsonschema.py @@ -2,7 +2,9 @@ import copy import pkgutil import json -from jsonschema import Draft7Validator # pylint: disable=import-self + +from jsonschema import Draft7Validator, draft7_format_checker # pylint: disable=import-self +from schema_enforcer.schemas.validator import BaseValidation from schema_enforcer.validation import ValidationResult, RESULT_FAIL, RESULT_PASS # TODO do we need to catch a possible exception here ? @@ -10,7 +12,7 @@ v7schema = json.loads(v7data.decode("utf-8")) -class JsonSchema: +class JsonSchema(BaseValidation): # pylint: disable=too-many-instance-attributes """class to manage jsonschema type schemas.""" schematype = "jsonchema" @@ -23,15 +25,15 @@ def __init__(self, schema, filename, root): filename (string): Name of the schema file on the filesystem. root (string): Absolute path to the directory where the schema file is located. """ + super().__init__() self.filename = filename self.root = root self.data = schema self.id = self.data.get("$id") # pylint: disable=invalid-name - self.top_level_properties = [ - prop for prop in self.data.get("properties") # pylint: disable=unnecessary-comprehension - ] + self.top_level_properties = set(self.data.get("properties")) self.validator = None self.strict_validator = None + self.format_checker = draft7_format_checker def get_id(self): """Return the unique ID of the schema.""" @@ -56,14 +58,11 @@ def validate(self, data, strict=False): for err in validator.iter_errors(data): has_error = True - yield ValidationResult( - schema_id=self.id, result=RESULT_FAIL, message=err.message, absolute_path=list(err.absolute_path) - ) + self.add_validation_error(err.message, absolute_path=list(err.absolute_path)) if not has_error: - yield ValidationResult( - schema_id=self.id, result=RESULT_PASS, - ) + self.add_validation_pass() + return self.get_results() def validate_to_dict(self, data, strict=False): """Return a list of ValidationResult objects. @@ -90,7 +89,7 @@ def __get_validator(self): if self.validator: return self.validator - self.validator = Draft7Validator(self.data) + self.validator = Draft7Validator(self.data, format_checker=self.format_checker) return self.validator @@ -124,7 +123,7 @@ def __get_strict_validator(self): ) items["additionalProperties"] = False - self.strict_validator = Draft7Validator(schema) + self.strict_validator = Draft7Validator(schema, format_checker=self.format_checker) return self.strict_validator def check_if_valid(self): @@ -133,7 +132,7 @@ def check_if_valid(self): Returns: List[ValidationResult]: A list of validation result objects. 
""" - validator = Draft7Validator(v7schema) + validator = Draft7Validator(v7schema, format_checker=self.format_checker) results = [] has_error = False diff --git a/schema_enforcer/schemas/manager.py b/schema_enforcer/schemas/manager.py index 107b263..02b4ea4 100644 --- a/schema_enforcer/schemas/manager.py +++ b/schema_enforcer/schemas/manager.py @@ -10,9 +10,10 @@ from schema_enforcer.utils import load_file, find_file, find_files, dump_data_to_yaml from schema_enforcer.validation import ValidationResult, RESULT_PASS, RESULT_FAIL -from schema_enforcer.exceptions import SchemaNotDefined +from schema_enforcer.exceptions import SchemaNotDefined, InvalidJSONSchema from schema_enforcer.utils import error, warn from schema_enforcer.schemas.jsonschema import JsonSchema +from schema_enforcer.schemas.validator import load_validators class SchemaManager: @@ -43,6 +44,10 @@ def __init__(self, config): schema = self.create_schema_from_file(root, filename) self.schemas[schema.get_id()] = schema + # Load validators + validators = load_validators(config.validator_directory) + self.schemas.update(validators) + def create_schema_from_file(self, root, filename): # pylint: disable=no-self-use """Create a new JsonSchema object for a given file. @@ -61,7 +66,12 @@ def create_schema_from_file(self, root, filename): # pylint: disable=no-self-us # schema_type = "jsonschema" base_uri = f"file:{root}/" schema_full = jsonref.JsonRef.replace_refs(file_data, base_uri=base_uri, jsonschema=True, loader=load_file) - return JsonSchema(schema=schema_full, filename=filename, root=root) + schema = JsonSchema(schema=schema_full, filename=filename, root=root) + # Only add valid jsonschema files and raise an exception if an invalid file is found + valid = all((result.passed() for result in schema.check_if_valid())) + if not valid: + raise InvalidJSONSchema(schema) + return schema def iter_schemas(self): """Return an iterator of all schemas in the SchemaManager. @@ -192,6 +202,7 @@ def test_schema_invalid(self, schema_id): # pylint: disable=too-many-locals results = [] for test_dir in test_dirs: + schema.clear_results() data_file = find_file(os.path.join(invalid_test_dir, test_dir, "data")) expected_results_file = find_file(os.path.join(invalid_test_dir, test_dir, "results")) @@ -253,7 +264,7 @@ def generate_invalid_tests_expected(self, schema_id): # For each test, load the data file, test the data against the schema and save the results for test_dir in test_dirs: - + schema.clear_results() data_file = find_file(os.path.join(invalid_test_dir, test_dir, "data")) if not data_file: diff --git a/schema_enforcer/schemas/validator.py b/schema_enforcer/schemas/validator.py new file mode 100644 index 0000000..7e4337e --- /dev/null +++ b/schema_enforcer/schemas/validator.py @@ -0,0 +1,110 @@ +"""Classes for custom validator plugins.""" +# pylint: disable=no-member, too-few-public-methods +# See PEP585 (https://www.python.org/dev/peps/pep-0585/) +from __future__ import annotations +import pkgutil +import inspect +import jmespath +from schema_enforcer.validation import ValidationResult + + +class BaseValidation: + """Base class for Validation classes.""" + + def __init__(self): + """Base init for all validation classes.""" + self._results: list[ValidationResult] = [] + + def add_validation_error(self, message: str, **kwargs): + """Add validator error to results. 
+ + Args: + message (str): error message + kwargs (optional): additional arguments to add to ValidationResult when required + """ + self._results.append(ValidationResult(result="FAIL", schema_id=self.id, message=message, **kwargs)) + + def add_validation_pass(self, **kwargs): + """Add validator pass to results. + + Args: + kwargs (optional): additional arguments to add to ValidationResult when required + """ + self._results.append(ValidationResult(result="PASS", schema_id=self.id, **kwargs)) + + def get_results(self) -> list[ValidationResult]: + """Return all validation results for this validator.""" + if not self._results: + self._results.append(ValidationResult(result="PASS", schema_id=self.id)) + + return self._results + + def clear_results(self): + """Reset results for validator instance.""" + self._results = [] + + def validate(self, data: dict, strict: bool): + """Required function for custom validator. + + Args: + data (dict): variables to be validated by validator + strict (bool): true when --strict cli option is used to request strict validation (if provided) + + Returns: + None + + Use add_validation_error and add_validation_pass to report results. + """ + raise NotImplementedError + + +class JmesPathModelValidation(BaseValidation): + """Base class for JmesPathModelValidation classes.""" + + def validate(self, data: dict, strict: bool): # pylint: disable=W0613 + """Validate data using custom jmespath validator plugin.""" + operators = { + "gt": lambda r, v: int(r) > int(v), + "gte": lambda r, v: int(r) >= int(v), + "eq": lambda r, v: r == v, + "lt": lambda r, v: int(r) < int(v), + "lte": lambda r, v: int(r) <= int(v), + "contains": lambda r, v: v in r, + } + lhs = jmespath.search(self.left, data) + valid = True + if lhs: + # Check rhs for compiled jmespath expression + if isinstance(self.right, jmespath.parser.ParsedResult): + rhs = self.right.search(data) + else: + rhs = self.right + valid = operators[self.operator](lhs, rhs) + if not valid: + self.add_validation_error(self.error) + + +def is_validator(obj) -> bool: + """Returns True if the object is a BaseValidation or JmesPathModelValidation subclass.""" + try: + return issubclass(obj, BaseValidation) and obj not in (JmesPathModelValidation, BaseValidation) + except TypeError: + return False + + +def load_validators(validator_path: str) -> dict[str, BaseValidation]: + """Load all validator plugins from validator_path.""" + validators = dict() + for importer, module_name, _ in pkgutil.iter_modules([validator_path]): + module = importer.find_module(module_name).load_module(module_name) + for name, cls in inspect.getmembers(module, is_validator): + # Default to class name if id doesn't exist + if not hasattr(cls, "id"): + cls.id = name + if cls.id in validators: + print( + f"Unable to load the validator {cls.id}, there is already a validator with the same name ({name})." 
+ ) + else: + validators[cls.id] = cls() + return validators diff --git a/tasks.py b/tasks.py index 5f28510..b23fbc2 100644 --- a/tasks.py +++ b/tasks.py @@ -12,8 +12,8 @@ def project_ver(): """Find version from pyproject.toml to use for docker image tagging.""" - with open("pyproject.toml") as file: - return toml.load(file)["tool"]["poetry"].get("version", "latest") + with open("pyproject.toml") as config_file: + return toml.load(config_file)["tool"]["poetry"].get("version", "latest") def is_truthy(arg): @@ -22,6 +22,7 @@ def is_truthy(arg): Examples: >>> is_truthy('yes') True + Args: arg (str): Truthy string (True values are y, yes, t, true, on and 1; false values are n, no, f, false, off and 0. Raises ValueError if val is anything else. @@ -31,141 +32,172 @@ def is_truthy(arg): return bool(strtobool(arg)) +PYPROJECT_CONFIG = toml.load("pyproject.toml") +TOOL_CONFIG = PYPROJECT_CONFIG["tool"]["poetry"] + # Can be set to a separate Python version to be used for launching or building image PYTHON_VER = os.getenv("PYTHON_VER", "3.7") # Can be set to a separate ANsible version to be used for launching or building image -ANSIBLE_VER = os.getenv("ANSIBLE_VER", None) +ANSIBLE_VER = os.getenv("ANSIBLE_VER", "2.10.8") +ANSIBLE_PACKAGE = os.getenv("ANSIBLE_PACKAGE", "ansible-base") # Name of the docker image/image -NAME = os.getenv("IMAGE_NAME", f"schema-enforcer-py{PYTHON_VER}") +IMAGE_NAME = os.getenv("IMAGE_NAME", TOOL_CONFIG["name"]) # Tag for the image -IMAGE_VER = os.getenv("IMAGE_VER", project_ver()) +IMAGE_VER = os.getenv("IMAGE_VER", f"{TOOL_CONFIG['version']}-py{PYTHON_VER}") # Gather current working directory for Docker commands PWD = os.getcwd() # Local or Docker execution provide "local" to run locally without docker execution INVOKE_LOCAL = is_truthy(os.getenv("INVOKE_LOCAL", False)) # pylint: disable=W1508 -def run_cmd(context, exec_cmd, name=NAME, image_ver=IMAGE_VER, local=INVOKE_LOCAL): +def _get_image_name(with_ansible=False): + """Gets the name of the container image to use. + + Args: + with_ansible (bool): Get name of container image with Ansible installed. + + Returns: + str: Name of container image. Includes tag. + """ + if with_ansible: + name = f"{IMAGE_NAME}:{IMAGE_VER}-{ANSIBLE_PACKAGE}{ANSIBLE_VER}" + else: + name = f"{IMAGE_NAME}:{IMAGE_VER}" + + return name + + +def run_cmd(context, exec_cmd, with_ansible=False): """Wrapper to run the invoke task commands. Args: - context ([invoke.task]): Invoke task object. - exec_cmd ([str]): Command to run. - name ([str], optional): Image name to use if exec_env is `docker`. Defaults to NAME. - image_ver ([str], optional): Version of image to use if exec_env is `docker`. Defaults to IMAGE_VER. - local (bool): Define as `True` to execute locally + context (invoke.task): Invoke task object. + exec_cmd (str): Command to run. + with_ansible (bool): Whether to run the command in a container that has ansible installed Returns: result (obj): Contains Invoke result from running task. 
""" - if is_truthy(local): + name = _get_image_name(with_ansible) + + if INVOKE_LOCAL: print(f"LOCAL - Running command {exec_cmd}") result = context.run(exec_cmd, pty=True) else: - print(f"DOCKER - Running command: {exec_cmd} container: {name}:{image_ver}") - result = context.run(f"docker run -it -v {PWD}:/local {name}:{image_ver} sh -c '{exec_cmd}'", pty=True) + print(f"DOCKER - Running command: {exec_cmd} container: {name}") + result = context.run(f"docker run -it -v {PWD}:/local {name} sh -c '{exec_cmd}'", pty=True) return result @task -def build( - context, - name=NAME, - python_ver=PYTHON_VER, - ansible_ver=ANSIBLE_VER, - image_ver=IMAGE_VER, - nocache=False, - forcerm=False, - without_ansible=False, +def build_image( + context, cache=True, force_rm=False, hide=False, with_ansible=False ): # pylint: disable=too-many-arguments - """This will build an image with the provided name and python version. + """Builds a container with schema-enforcer installed. Args: - context (obj): Used to run specific commands - name (str): Used to name the docker image - python_ver (str): Define the Python version docker image to build from - ansible_ver (str): Define the Ansible version which will be installed. Defaults to pyproject.toml definition if not specified. - image_ver (str): Define image version - nocache (bool): Do not use cache when building the image - forcerm (bool): Always remove intermediate containers - without_ansible (bool): Build image without ansible + context (invoke.task): Invoke task object + cache (bool): Do not use cache when building the image + force_rm (bool): Always remove intermediate containers + hide: (bool): Suppress output from docker build + with_ansible (bool): Build a container with Ansible installed """ - if without_ansible: - stdout_string = f"Building image {name}-without-ansible:{image_ver} without ansible" - command = f"docker build --tag {name}-without-ansible:{image_ver} --build-arg PYTHON_VER={python_ver} " - command += "--target without_ansible " - - else: - command = f"docker build --tag {name}:{image_ver} --build-arg PYTHON_VER={python_ver} " - if ansible_ver: - stdout_string = f"Building image {name}:{image_ver} with ansible version {ansible_ver}" - command += f"--build-arg ANSIBLE_VER={ansible_ver} " - else: - stdout_string = f"Building image {name}:{image_ver} with ansible version specified in pyproject.toml file." + name = _get_image_name(with_ansible) + env = {"PYTHON_VER": PYTHON_VER} - command += "--target base " + if with_ansible: + env["ANSIBLE_VER"] = ANSIBLE_VER + env["ANSIBLE_PACKAGE"] = ANSIBLE_PACKAGE + command = f"docker build --tag {name} --target with_ansible" + command += f" --build-arg ANSIBLE_VER={ANSIBLE_VER} --build-arg ANSIBLE_PACKAGE={ANSIBLE_PACKAGE}" - command += "-f Dockerfile ." + else: + command = command = f"docker build --tag {name} --target base" - if nocache: + command += f" --build-arg PYTHON_VER={PYTHON_VER} -f Dockerfile ." + if not cache: command += " --no-cache" - if forcerm: + if force_rm: command += " --force-rm" - print(stdout_string) - result = context.run(command, hide=True) + print(f"Building image {name}") + result = context.run(command, hide=hide, env=env) + if result.exited != 0: - print(f"Failed to build image {name}:{image_ver}\nError: {result.stderr}") + print(f"Failed to build image {name}\nError: {result.stderr}") @task -def clean(context, name=NAME, image_ver=IMAGE_VER): - """This will remove the specific image. +def clean_image(context, with_ansible=False): + """Remove the schema-enforcer container. 
+ + Args: + context (obj): Used to run specific commands + with_ansible (bool): Remove schema-enforcer container with ansible installed + """ + name = _get_image_name(with_ansible) + print(f"Attempting to forcefully remove image {name}") + context.run(f"docker rmi {name} --force") + + +@task( + help={ + "cache": "Whether to use Docker's cache when building images (default enabled)", + "force_rm": "Always remove intermediate images", + "hide": "Suppress output from Docker", + } +) +def build(context, cache=True, force_rm=False, hide=False): + """This will build an image with the provided name and python version. + + Args: + context (obj): Used to run specific commands + cache (bool): Do not use cache when building the image + force_rm (bool): Always remove intermediate containers + hide (bool): Suppress output from docker build + """ + build_image(context, cache, force_rm, hide=hide) + build_image(context, cache, force_rm, hide=hide, with_ansible=True) + + +@task +def clean(context): + """This will remove a specific image. Args: context (obj): Used to run specific commands - name (str): Used to name the docker image - image_ver (str): Define image version """ - print(f"Attempting to forcefully remove image {name}:{image_ver}") - context.run(f"docker rmi {name}:{image_ver} --force") - print(f"Successfully removed image {name}:{image_ver}") + clean_image(context) + clean_image(context, with_ansible=True) @task -def rebuild(context, name=NAME, python_ver=PYTHON_VER, image_ver=IMAGE_VER): +def rebuild(context, cache=True, force_rm=False): """This will clean the image and then rebuild image without using cache. Args: context (obj): Used to run specific commands - name (str): Used to name the docker image - python_ver (str): Define the Python version docker image to build from - image_ver (str): Define image version + cache (bool): Use cache for rebuild + force_rm (bool): Always remove intermediate containers """ - clean(context, name, image_ver) - build(context, name, python_ver, image_ver) + clean(context) + build(context, cache=cache, force_rm=force_rm) @task -def pytest(context, name=NAME, image_ver=IMAGE_VER, local=INVOKE_LOCAL): +def pytest(context): """This will run pytest for the specified name and Python version. Args: context (obj): Used to run specific commands - name (str): Used to name the docker image - image_ver (str): Will use the container version docker image - local (bool): Define as `True` to execute locally """ - # pty is set to true to properly run the docker commands due to the invocation process of docker - # https://docs.pyinvoke.org/en/latest/api/runners.html - Search for pty for more information - # Install python module - exec_cmd = 'find tests/ -name "*.py" -a -not -name "test_cli_ansible_not_exists.py" | xargs pytest -vv' - run_cmd(context, exec_cmd, name, image_ver, local) + exec_cmd = 'find tests/ -name "test_*.py" -a -not -name "test_cli_ansible_not_exists.py" | xargs pytest -vv' + run_cmd(context, exec_cmd, with_ansible=True) @task -def pytest_without_ansible(context, name=f"{NAME}-without-ansible", image_ver=IMAGE_VER, local=INVOKE_LOCAL): +def pytest_without_ansible(context): """This will run pytest only to assert the correct errors are raised when pytest is not installed. 
This must be run inside of a container or environment in which ansible is not installed, otherwise the test case @@ -173,65 +205,46 @@ def pytest_without_ansible(context, name=f"{NAME}-without-ansible", image_ver=IM Args: context (obj): Used to run specific commands - name (str): Used to name the docker image - image_ver (str): Will use the container version docker image - local (bool): Define as `True` to execute locally """ exec_cmd = 'find tests/ -name "test_cli_ansible_not_exists.py" | xargs pytest -vv' - - run_cmd(context, exec_cmd, name, image_ver, local) + run_cmd(context, exec_cmd) @task -def black(context, name=NAME, image_ver=IMAGE_VER, local=INVOKE_LOCAL): +def black(context): """This will run black to check that Python files adherence to black standards. Args: context (obj): Used to run specific commands - name (str): Used to name the docker image - image_ver (str): Define image version - local (bool): Define as `True` to execute locally """ - # pty is set to true to properly run the docker commands due to the invocation process of docker - # https://docs.pyinvoke.org/en/latest/api/runners.html - Search for pty for more information exec_cmd = "black --check --diff ." - run_cmd(context, exec_cmd, name, image_ver, local) + run_cmd(context, exec_cmd, with_ansible=True) @task -def flake8(context, name=NAME, image_ver=IMAGE_VER, local=INVOKE_LOCAL): +def flake8(context): """This will run flake8 for the specified name and Python version. Args: context (obj): Used to run specific commands - name (str): Used to name the docker image - image_ver (str): Define image version - local (bool): Define as `True` to execute locally """ - # pty is set to true to properly run the docker commands due to the invocation process of docker - # https://docs.pyinvoke.org/en/latest/api/runners.html - Search for pty for more information exec_cmd = "flake8 ." - run_cmd(context, exec_cmd, name, image_ver, local) + run_cmd(context, exec_cmd, with_ansible=True) @task -def pylint(context, name=NAME, image_ver=IMAGE_VER, local=INVOKE_LOCAL): +def pylint(context): """This will run pylint for the specified name and Python version. Args: context (obj): Used to run specific commands - name (str): Used to name the docker image - image_ver (str): Define image version - local (bool): Define as `True` to execute locally """ - # pty is set to true to properly run the docker commands due to the invocation process of docker - # https://docs.pyinvoke.org/en/latest/api/runners.html - Search for pty for more information exec_cmd = 'find . -name "*.py" | xargs pylint' - run_cmd(context, exec_cmd, name, image_ver, local) + run_cmd(context, exec_cmd, with_ansible=True) @task -def yamllint(context, name=NAME, image_ver=IMAGE_VER, local=INVOKE_LOCAL): +def yamllint(context): """This will run yamllint to validate formatting adheres to NTC defined YAML standards. Args: @@ -240,92 +253,58 @@ def yamllint(context, name=NAME, image_ver=IMAGE_VER, local=INVOKE_LOCAL): image_ver (str): Define image version local (bool): Define as `True` to execute locally """ - # pty is set to true to properly run the docker commands due to the invocation process of docker - # https://docs.pyinvoke.org/en/latest/api/runners.html - Search for pty for more information exec_cmd = "yamllint ." 
- run_cmd(context, exec_cmd, name, image_ver, local) + run_cmd(context, exec_cmd, with_ansible=True) @task -def pydocstyle(context, name=NAME, image_ver=IMAGE_VER, local=INVOKE_LOCAL): +def pydocstyle(context): """This will run pydocstyle to validate docstring formatting adheres to NTC defined standards. Args: context (obj): Used to run specific commands - name (str): Used to name the docker image - image_ver (str): Define image version - local (bool): Define as `True` to execute locally """ - # pty is set to true to properly run the docker commands due to the invocation process of docker - # https://docs.pyinvoke.org/en/latest/api/runners.html - Search for pty for more information exec_cmd = "pydocstyle ." - run_cmd(context, exec_cmd, name, image_ver, local) + run_cmd(context, exec_cmd, with_ansible=True) @task -def bandit(context, name=NAME, image_ver=IMAGE_VER, local=INVOKE_LOCAL): +def bandit(context): """This will run bandit to validate basic static code security analysis. Args: context (obj): Used to run specific commands - name (str): Used to name the docker image - image_ver (str): Define image version - local (bool): Define as `True` to execute locally """ - # pty is set to true to properly run the docker commands due to the invocation process of docker - # https://docs.pyinvoke.org/en/latest/api/runners.html - Search for pty for more information exec_cmd = "bandit --recursive ./ --configfile .bandit.yml" - run_cmd(context, exec_cmd, name, image_ver, local) - - -@task -def cli(context, name=NAME, image_ver=IMAGE_VER, without_ansible=False): - """This will enter the image to perform troubleshooting or dev work. - - Args: - context (obj): Used to run specific commands - name (str): Used to name the docker image - image_ver (str): Define image version - without_ansible (bool): Enter cli in without-ansible container - """ - if without_ansible: - name = f"{name}-without-ansible" - - dev = f"docker run -it -v {PWD}:/local {name}:{image_ver} /bin/bash" - context.run(f"{dev}", pty=True) + run_cmd(context, exec_cmd, with_ansible=True) @task -def tests(context, name=NAME, image_ver=IMAGE_VER, local=INVOKE_LOCAL): +def tests(context): """This will run all tests for the specified name and Python version. Args: context (obj): Used to run specific commands - name (str): Used to name the docker image - image_ver (str): Define image version - local (bool): Define as `True` to execute locally """ - black(context, name, image_ver, local) - flake8(context, name, image_ver, local) - pylint(context, name, image_ver, local) - yamllint(context, name, image_ver, local) - pydocstyle(context, name, image_ver, local) - bandit(context, name, image_ver, local) - pytest(context, name, image_ver, local) - + black(context) + flake8(context) + pylint(context) + yamllint(context) + pydocstyle(context) + bandit(context) + pytest(context) + pytest_without_ansible(context) print("All tests have passed!") @task -def tests_without_ansible(context, name=f"{NAME}-without-ansible", image_ver=IMAGE_VER, local=INVOKE_LOCAL): - """This will run all tests for the specified name and Python version. +def cli(context, with_ansible=False): + """This will enter the image to perform troubleshooting or dev work. 
Args: context (obj): Used to run specific commands - name (str): Used to name the docker image - image_ver (str): Define image version - local (bool): Define as `True` to execute locally + with_ansible (str): Attach to container with ansible version specified by the 'ANSIBLE_VER' env var """ - pytest_without_ansible(context, name, image_ver, local) - - print("All tests have passed!") + name = _get_image_name(with_ansible) + dev = f"docker run -it -v {PWD}:/local {name} /bin/bash" + context.run(f"{dev}", pty=True) diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 0000000..69d234e --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,54 @@ +"""conftest file for pytest""" +import glob +import os +from schema_enforcer.utils import load_file +from schema_enforcer.schemas.jsonschema import JsonSchema + + +FIXTURES_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "fixtures", "test_jsonschema") +FORMAT_CHECK_ERROR_MESSAGE_MAPPING = { + "incorrect_regex_format": "'[' is not a 'regex'", + "incorrect_date_format": "'2021-111-28' is not a 'date'", + "incorrect_hostname_format": "'ntc@ntc.com' is not a 'hostname'", + "incorrect_uri_format": "'sftp//' is not a 'uri'", + "incorrect_jsonptr_format": "'fakejsonptr' is not a 'json-pointer'", + "incorrect_email_format": "'networktocode.code.com' is not a 'email'", + "incorrect_ipv4_format": "'10.1.1.300' is not a 'ipv4'", + "incorrect_ipv6_format": "'2001:00000:3238:DFE1:63:0000:0000:FEFB' is not a 'ipv6'", + "incorrect_time_format": "'20:20:33333+00:00' is not a 'time'", + "incorrect_datetime_format": "'January 29th 2021' is not a 'date-time'", + "incorrect_iri_format": "'fake_iri' is not a 'iri'", +} + + +def pytest_generate_tests(metafunc): + """Pytest_generate_tests prehook""" + if metafunc.function.__name__ == "test_format_checkers": + schema_files = glob.glob(f"{FIXTURES_DIR}/schema/schemas/incorrect_*.yml") + schema_instances = [] + for schema_file in schema_files: + schema_instance = JsonSchema( + schema=load_file(schema_file), + filename=os.path.basename(schema_file), + root=os.path.join(FIXTURES_DIR, "schema", "schemas"), + ) + schema_instances.append(schema_instance) + + data_files = glob.glob(f"{FIXTURES_DIR}/hostvars/spa-madrid-rt1/incorrect_*.yml") + data_instances = [] + for data_file in data_files: + data = load_file(data_file) + data_instances.append(data) + + metafunc.parametrize( + "schema_instance,data_instance, expected_error_message", + [ + ( + schema_instances[i], + data_instances[i], + FORMAT_CHECK_ERROR_MESSAGE_MAPPING.get(os.path.basename(schema_files[i])[:-4]), + ) + for i in range(0, len(schema_instances)) + ], + ids=[os.path.basename(schema_files[i])[:-4] for i in range(0, len(schema_instances))], + ) diff --git a/tests/fixtures/test_jsonschema/hostvars/spa-madrid-rt1/dns.yml b/tests/fixtures/test_jsonschema/hostvars/spa-madrid-rt1/dns.yml new file mode 100644 index 0000000..468a4aa --- /dev/null +++ b/tests/fixtures/test_jsonschema/hostvars/spa-madrid-rt1/dns.yml @@ -0,0 +1,4 @@ +--- +dns_servers: + - address: "10.1.1.300" + - address: "10.7.7.7" diff --git a/tests/fixtures/test_jsonschema/hostvars/spa-madrid-rt1/incorrect_date_format.yml b/tests/fixtures/test_jsonschema/hostvars/spa-madrid-rt1/incorrect_date_format.yml new file mode 100644 index 0000000..7516b7b --- /dev/null +++ b/tests/fixtures/test_jsonschema/hostvars/spa-madrid-rt1/incorrect_date_format.yml @@ -0,0 +1,3 @@ +--- +dns_servers: + - last_rebooted: "2021-111-28" diff --git 
a/tests/fixtures/test_jsonschema/hostvars/spa-madrid-rt1/incorrect_datetime_format.yml b/tests/fixtures/test_jsonschema/hostvars/spa-madrid-rt1/incorrect_datetime_format.yml new file mode 100644 index 0000000..df3b2c5 --- /dev/null +++ b/tests/fixtures/test_jsonschema/hostvars/spa-madrid-rt1/incorrect_datetime_format.yml @@ -0,0 +1,3 @@ +--- +dns_servers: + - last_rebooted: "January 29th 2021" diff --git a/tests/fixtures/test_jsonschema/hostvars/spa-madrid-rt1/incorrect_email_format.yml b/tests/fixtures/test_jsonschema/hostvars/spa-madrid-rt1/incorrect_email_format.yml new file mode 100644 index 0000000..8da8945 --- /dev/null +++ b/tests/fixtures/test_jsonschema/hostvars/spa-madrid-rt1/incorrect_email_format.yml @@ -0,0 +1,3 @@ +--- +dns_servers: + - contact_email: "networktocode.code.com" diff --git a/tests/fixtures/test_jsonschema/hostvars/spa-madrid-rt1/incorrect_hostname_format.yml b/tests/fixtures/test_jsonschema/hostvars/spa-madrid-rt1/incorrect_hostname_format.yml new file mode 100644 index 0000000..ba1d9f0 --- /dev/null +++ b/tests/fixtures/test_jsonschema/hostvars/spa-madrid-rt1/incorrect_hostname_format.yml @@ -0,0 +1,3 @@ +--- +dns_servers: + - hostname: "ntc@ntc.com" diff --git a/tests/fixtures/test_jsonschema/hostvars/spa-madrid-rt1/incorrect_ipv4_format.yml b/tests/fixtures/test_jsonschema/hostvars/spa-madrid-rt1/incorrect_ipv4_format.yml new file mode 100644 index 0000000..468a4aa --- /dev/null +++ b/tests/fixtures/test_jsonschema/hostvars/spa-madrid-rt1/incorrect_ipv4_format.yml @@ -0,0 +1,4 @@ +--- +dns_servers: + - address: "10.1.1.300" + - address: "10.7.7.7" diff --git a/tests/fixtures/test_jsonschema/hostvars/spa-madrid-rt1/incorrect_ipv6_format.yml b/tests/fixtures/test_jsonschema/hostvars/spa-madrid-rt1/incorrect_ipv6_format.yml new file mode 100644 index 0000000..d5c2bba --- /dev/null +++ b/tests/fixtures/test_jsonschema/hostvars/spa-madrid-rt1/incorrect_ipv6_format.yml @@ -0,0 +1,3 @@ +--- +dns_servers: + - address: "2001:00000:3238:DFE1:63:0000:0000:FEFB" diff --git a/tests/fixtures/test_jsonschema/hostvars/spa-madrid-rt1/incorrect_iri_format.yml b/tests/fixtures/test_jsonschema/hostvars/spa-madrid-rt1/incorrect_iri_format.yml new file mode 100644 index 0000000..6011c76 --- /dev/null +++ b/tests/fixtures/test_jsonschema/hostvars/spa-madrid-rt1/incorrect_iri_format.yml @@ -0,0 +1,3 @@ +--- +dns_servers: + - iri: "fake_iri" diff --git a/tests/fixtures/test_jsonschema/hostvars/spa-madrid-rt1/incorrect_jsonptr_format.yml b/tests/fixtures/test_jsonschema/hostvars/spa-madrid-rt1/incorrect_jsonptr_format.yml new file mode 100644 index 0000000..c632811 --- /dev/null +++ b/tests/fixtures/test_jsonschema/hostvars/spa-madrid-rt1/incorrect_jsonptr_format.yml @@ -0,0 +1,3 @@ +--- +dns_servers: + - json_ptr: "fakejsonptr" diff --git a/tests/fixtures/test_jsonschema/hostvars/spa-madrid-rt1/incorrect_regex_format.yml b/tests/fixtures/test_jsonschema/hostvars/spa-madrid-rt1/incorrect_regex_format.yml new file mode 100644 index 0000000..9671166 --- /dev/null +++ b/tests/fixtures/test_jsonschema/hostvars/spa-madrid-rt1/incorrect_regex_format.yml @@ -0,0 +1,3 @@ +--- +dns_servers: + - site_prefix: "[" diff --git a/tests/fixtures/test_jsonschema/hostvars/spa-madrid-rt1/incorrect_time_format.yml b/tests/fixtures/test_jsonschema/hostvars/spa-madrid-rt1/incorrect_time_format.yml new file mode 100644 index 0000000..0c2181d --- /dev/null +++ b/tests/fixtures/test_jsonschema/hostvars/spa-madrid-rt1/incorrect_time_format.yml @@ -0,0 +1,3 @@ +--- +dns_servers: + - uptime: "20:20:33333+00:00" 
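Standalone illustration (not part of the patch): the incorrect_*_format fixtures above only fail validation because the Draft 7 format checker is now passed to the validator (the Draft7Validator(..., format_checker=self.format_checker) change in schema_enforcer/schemas/jsonschema.py). A minimal sketch of that behaviour, reusing the invalid address from the dns.yml / incorrect_ipv4_format fixtures and the expected message from tests/conftest.py; the schema below is an ad-hoc stand-in, not one of the fixture files:

    from jsonschema import Draft7Validator, draft7_format_checker

    schema = {
        "type": "object",
        "properties": {
            "dns_servers": {
                "type": "array",
                "items": {
                    "type": "object",
                    "properties": {"address": {"type": "string", "format": "ipv4"}},
                    "required": ["address"],
                },
            }
        },
    }
    data = {"dns_servers": [{"address": "10.1.1.300"}, {"address": "10.7.7.7"}]}

    # Without format_checker the bad address passes silently; with it, the error is reported.
    for err in Draft7Validator(schema, format_checker=draft7_format_checker).iter_errors(data):
        print(err.message)  # "'10.1.1.300' is not a 'ipv4'"
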
diff --git a/tests/fixtures/test_jsonschema/hostvars/spa-madrid-rt1/incorrect_uri_format.yml b/tests/fixtures/test_jsonschema/hostvars/spa-madrid-rt1/incorrect_uri_format.yml new file mode 100644 index 0000000..22a027e --- /dev/null +++ b/tests/fixtures/test_jsonschema/hostvars/spa-madrid-rt1/incorrect_uri_format.yml @@ -0,0 +1,3 @@ +--- +dns_servers: + - uri: "sftp//" diff --git a/tests/fixtures/test_jsonschema/schema/schemas/incorrect_date_format.yml b/tests/fixtures/test_jsonschema/schema/schemas/incorrect_date_format.yml new file mode 100644 index 0000000..95e60b1 --- /dev/null +++ b/tests/fixtures/test_jsonschema/schema/schemas/incorrect_date_format.yml @@ -0,0 +1,19 @@ +--- +$schema: "http://json-schema.org/draft-07/schema#" +$id: "schemas/incorrect_date_format" +description: "DNS Server Configuration schema." +type: "object" +properties: + dns_servers: + type: "array" + items: + type: "object" + properties: + last_rebooted: + type: "string" + format: "date" + required: + - "last_rebooted" + uniqueItems: true +required: + - "dns_servers" diff --git a/tests/fixtures/test_jsonschema/schema/schemas/incorrect_datetime_format.yml b/tests/fixtures/test_jsonschema/schema/schemas/incorrect_datetime_format.yml new file mode 100644 index 0000000..fe9e066 --- /dev/null +++ b/tests/fixtures/test_jsonschema/schema/schemas/incorrect_datetime_format.yml @@ -0,0 +1,19 @@ +--- +$schema: "http://json-schema.org/draft-07/schema#" +$id: "schemas/incorrect_datetime_format" +description: "DNS Server Configuration schema." +type: "object" +properties: + dns_servers: + type: "array" + items: + type: "object" + properties: + last_rebooted: + type: "string" + format: "date-time" + required: + - "last_rebooted" + uniqueItems: true +required: + - "dns_servers" diff --git a/tests/fixtures/test_jsonschema/schema/schemas/incorrect_email_format.yml b/tests/fixtures/test_jsonschema/schema/schemas/incorrect_email_format.yml new file mode 100644 index 0000000..df77b8c --- /dev/null +++ b/tests/fixtures/test_jsonschema/schema/schemas/incorrect_email_format.yml @@ -0,0 +1,19 @@ +--- +$schema: "http://json-schema.org/draft-07/schema#" +$id: "schemas/incorrect_email_format" +description: "DNS Server Configuration schema." +type: "object" +properties: + dns_servers: + type: "array" + items: + type: "object" + properties: + contact_email: + type: "string" + format: "email" + required: + - "contact_email" + uniqueItems: true +required: + - "dns_servers" diff --git a/tests/fixtures/test_jsonschema/schema/schemas/incorrect_hostname_format.yml b/tests/fixtures/test_jsonschema/schema/schemas/incorrect_hostname_format.yml new file mode 100644 index 0000000..0fbc28c --- /dev/null +++ b/tests/fixtures/test_jsonschema/schema/schemas/incorrect_hostname_format.yml @@ -0,0 +1,19 @@ +--- +$schema: "http://json-schema.org/draft-07/schema#" +$id: "schemas/incorrect_hostname_format" +description: "DNS Server Configuration schema." 
+type: "object" +properties: + dns_servers: + type: "array" + items: + type: "object" + properties: + hostname: + type: "string" + format: "hostname" + required: + - "hostname" + uniqueItems: true +required: + - "dns_servers" diff --git a/tests/fixtures/test_jsonschema/schema/schemas/incorrect_ipv4_format.yml b/tests/fixtures/test_jsonschema/schema/schemas/incorrect_ipv4_format.yml new file mode 100755 index 0000000..8b5d337 --- /dev/null +++ b/tests/fixtures/test_jsonschema/schema/schemas/incorrect_ipv4_format.yml @@ -0,0 +1,23 @@ +--- +$schema: "http://json-schema.org/draft-07/schema#" +$id: "schemas/incorrect_ipv4_format" +description: "DNS Server Configuration schema." +type: "object" +properties: + dns_servers: + type: "array" + items: + type: "object" + properties: + name: + type: "string" + address: + type: "string" + format: "ipv4" + vrf: + type: "string" + required: + - "address" + uniqueItems: true +required: + - "dns_servers" diff --git a/tests/fixtures/test_jsonschema/schema/schemas/incorrect_ipv6_format.yml b/tests/fixtures/test_jsonschema/schema/schemas/incorrect_ipv6_format.yml new file mode 100644 index 0000000..1dd109d --- /dev/null +++ b/tests/fixtures/test_jsonschema/schema/schemas/incorrect_ipv6_format.yml @@ -0,0 +1,19 @@ +--- +$schema: "http://json-schema.org/draft-07/schema#" +$id: "schemas/incorrect_ipv6_format" +description: "DNS Schema to test ipv6 formatter." +type: "object" +properties: + dns_servers: + type: "array" + items: + type: "object" + properties: + address: + type: "string" + format: "ipv6" + required: + - "address" + uniqueItems: true +required: + - "dns_servers" diff --git a/tests/fixtures/test_jsonschema/schema/schemas/incorrect_iri_format.yml b/tests/fixtures/test_jsonschema/schema/schemas/incorrect_iri_format.yml new file mode 100644 index 0000000..2063821 --- /dev/null +++ b/tests/fixtures/test_jsonschema/schema/schemas/incorrect_iri_format.yml @@ -0,0 +1,19 @@ +--- +$schema: "http://json-schema.org/draft-07/schema#" +$id: "schemas/incorrect_iri_format" +description: "DNS Server Configuration schema." +type: "object" +properties: + dns_servers: + type: "array" + items: + type: "object" + properties: + iri: + type: "string" + format: "iri" + required: + - "iri" + uniqueItems: true +required: + - "dns_servers" diff --git a/tests/fixtures/test_jsonschema/schema/schemas/incorrect_jsonptr_format.yml b/tests/fixtures/test_jsonschema/schema/schemas/incorrect_jsonptr_format.yml new file mode 100644 index 0000000..2030e8f --- /dev/null +++ b/tests/fixtures/test_jsonschema/schema/schemas/incorrect_jsonptr_format.yml @@ -0,0 +1,19 @@ +--- +$schema: "http://json-schema.org/draft-07/schema#" +$id: "schemas/incorrect_jsonptr_format" +description: "DNS Schema to test ipv6 formatter." +type: "object" +properties: + dns_servers: + type: "array" + items: + type: "object" + properties: + json_ptr: + type: "string" + format: "json-pointer" + required: + - "json_ptr" + uniqueItems: true +required: + - "dns_servers" diff --git a/tests/fixtures/test_jsonschema/schema/schemas/incorrect_regex_format.yml b/tests/fixtures/test_jsonschema/schema/schemas/incorrect_regex_format.yml new file mode 100644 index 0000000..1bcc8e4 --- /dev/null +++ b/tests/fixtures/test_jsonschema/schema/schemas/incorrect_regex_format.yml @@ -0,0 +1,19 @@ +--- +$schema: "http://json-schema.org/draft-07/schema#" +$id: "schemas/incorrect_regex_format" +description: "DNS Server Configuration schema." 
+type: "object" +properties: + dns_servers: + type: "array" + items: + type: "object" + properties: + site_prefix: + type: "string" + format: "regex" + required: + - "site_prefix" + uniqueItems: true +required: + - "dns_servers" diff --git a/tests/fixtures/test_jsonschema/schema/schemas/incorrect_time_format.yml b/tests/fixtures/test_jsonschema/schema/schemas/incorrect_time_format.yml new file mode 100644 index 0000000..3960621 --- /dev/null +++ b/tests/fixtures/test_jsonschema/schema/schemas/incorrect_time_format.yml @@ -0,0 +1,19 @@ +--- +$schema: "http://json-schema.org/draft-07/schema#" +$id: "schemas/incorrect_time_format" +description: "DNS Server Configuration schema." +type: "object" +properties: + dns_servers: + type: "array" + items: + type: "object" + properties: + uptime: + type: "string" + format: "time" + required: + - "uptime" + uniqueItems: true +required: + - "dns_servers" diff --git a/tests/fixtures/test_jsonschema/schema/schemas/incorrect_uri_format.yml b/tests/fixtures/test_jsonschema/schema/schemas/incorrect_uri_format.yml new file mode 100644 index 0000000..b140720 --- /dev/null +++ b/tests/fixtures/test_jsonschema/schema/schemas/incorrect_uri_format.yml @@ -0,0 +1,19 @@ +--- +$schema: "http://json-schema.org/draft-07/schema#" +$id: "schemas/incorrect_uri_format" +description: "DNS Server Configuration schema." +type: "object" +properties: + dns_servers: + type: "array" + items: + type: "object" + properties: + uri: + type: "string" + format: "uri" + required: + - "uri" + uniqueItems: true +required: + - "dns_servers" diff --git a/tests/fixtures/test_jsonschema/schema/schemas/invalid.yml b/tests/fixtures/test_jsonschema/schema/schemas/invalid.yml new file mode 100644 index 0000000..6d57fbf --- /dev/null +++ b/tests/fixtures/test_jsonschema/schema/schemas/invalid.yml @@ -0,0 +1,11 @@ +--- +$schema: "http://json-schema.org/draft-07/schema#" +$id: "schemas/checks" +description: "Schema Checks" +type: "object" +properties: + type: "integer" + items: + type: "object" + properties: + name: "bla bla bla bla" diff --git a/tests/fixtures/test_manager/invalid/pyproject.toml b/tests/fixtures/test_manager/invalid/pyproject.toml new file mode 100644 index 0000000..a5bf0ea --- /dev/null +++ b/tests/fixtures/test_manager/invalid/pyproject.toml @@ -0,0 +1,4 @@ +[tool.schema_enforcer] +schema_file_exclude_filenames = [] + +schema_directory = "schemas" diff --git a/tests/fixtures/test_manager/invalid/schema/schemas/invalid.yml b/tests/fixtures/test_manager/invalid/schema/schemas/invalid.yml new file mode 100644 index 0000000..6d57fbf --- /dev/null +++ b/tests/fixtures/test_manager/invalid/schema/schemas/invalid.yml @@ -0,0 +1,11 @@ +--- +$schema: "http://json-schema.org/draft-07/schema#" +$id: "schemas/checks" +description: "Schema Checks" +type: "object" +properties: + type: "integer" + items: + type: "object" + properties: + name: "bla bla bla bla" diff --git a/tests/fixtures/test_manager/invalid_generate/schema/schemas/test.yml b/tests/fixtures/test_manager/invalid_generate/schema/schemas/test.yml new file mode 100644 index 0000000..ec68708 --- /dev/null +++ b/tests/fixtures/test_manager/invalid_generate/schema/schemas/test.yml @@ -0,0 +1,25 @@ +--- +$schema: "http://json-schema.org/draft-07/schema#" +$id: "schemas/test" +description: "NFTables Firewall Configuration schema." 
+type: "object" +properties: + firewall: + type: "object" + uniqueItems: true + additionalProperties: false + required: + - "rule" + - "variables" + properties: + rule: + type: "object" + properties: + bool: + type: "boolean" + Text: + type: "string" + dict: + type: "object" + variables: + type: "object" diff --git a/tests/fixtures/test_manager/invalid_generate/schema/tests/test/invalid/invalid_type1/data.json b/tests/fixtures/test_manager/invalid_generate/schema/tests/test/invalid/invalid_type1/data.json new file mode 100644 index 0000000..9154b9e --- /dev/null +++ b/tests/fixtures/test_manager/invalid_generate/schema/tests/test/invalid/invalid_type1/data.json @@ -0,0 +1,13 @@ +{ + "firewall": { + "rule": { + "bool": "true", + "Text": "text", + "dict": {} + }, + "variables": { + "Text": "text", + "array": [] + } + } +} \ No newline at end of file diff --git a/tests/fixtures/test_manager/invalid_generate/schema/tests/test/invalid/invalid_type1/exp_results.yml b/tests/fixtures/test_manager/invalid_generate/schema/tests/test/invalid/invalid_type1/exp_results.yml new file mode 100644 index 0000000..286c6ca --- /dev/null +++ b/tests/fixtures/test_manager/invalid_generate/schema/tests/test/invalid/invalid_type1/exp_results.yml @@ -0,0 +1,9 @@ +--- +results: + - result: "FAIL" + schema_id: "schemas/test" + absolute_path: + - "firewall" + - "rule" + - "bool" + message: "'true' is not of type 'boolean'" diff --git a/tests/fixtures/test_manager/invalid_generate/schema/tests/test/invalid/invalid_type2/data.json b/tests/fixtures/test_manager/invalid_generate/schema/tests/test/invalid/invalid_type2/data.json new file mode 100644 index 0000000..eea41ce --- /dev/null +++ b/tests/fixtures/test_manager/invalid_generate/schema/tests/test/invalid/invalid_type2/data.json @@ -0,0 +1,13 @@ +{ + "firewall": { + "rule": { + "bool": true, + "Text": 123, + "dict": {} + }, + "variables": { + "Text": "text", + "array": {} + } + } +} \ No newline at end of file diff --git a/tests/fixtures/test_manager/invalid_generate/schema/tests/test/invalid/invalid_type2/exp_results.yml b/tests/fixtures/test_manager/invalid_generate/schema/tests/test/invalid/invalid_type2/exp_results.yml new file mode 100644 index 0000000..57a2b73 --- /dev/null +++ b/tests/fixtures/test_manager/invalid_generate/schema/tests/test/invalid/invalid_type2/exp_results.yml @@ -0,0 +1,9 @@ +--- +results: + - result: "FAIL" + schema_id: "schemas/test" + absolute_path: + - "firewall" + - "rule" + - "Text" + message: "123 is not of type 'string'" diff --git a/tests/fixtures/test_manager/invalid_generate/schema/tests/test/valid/test.json b/tests/fixtures/test_manager/invalid_generate/schema/tests/test/valid/test.json new file mode 100644 index 0000000..84dd392 --- /dev/null +++ b/tests/fixtures/test_manager/invalid_generate/schema/tests/test/valid/test.json @@ -0,0 +1,13 @@ +{ + "firewall": { + "rule": { + "bool": true, + "Text": "text", + "dict": {} + }, + "variables": { + "Text": "text", + "array": [] + } + } +} \ No newline at end of file diff --git a/tests/fixtures/test_validators/inventory/host_vars/az_phx_pe01/base.yml b/tests/fixtures/test_validators/inventory/host_vars/az_phx_pe01/base.yml new file mode 100644 index 0000000..e8adf5e --- /dev/null +++ b/tests/fixtures/test_validators/inventory/host_vars/az_phx_pe01/base.yml @@ -0,0 +1,22 @@ +--- +hostname: "az-phx-pe01" +pair_rtr: "az-phx-pe02" +upstreams: [] +interfaces: + MgmtEth0/0/CPU0/0: + ipv4: "172.16.1.1" + Loopback0: + ipv4: "192.168.1.1" + ipv6: "2001:db8:1::1" + GigabitEthernet0/0/0/0: + 
ipv4: "10.1.0.1" + ipv6: "2001:db8::" + peer: "az-phx-pe02" + peer_int: "GigabitEthernet0/0/0/0" + type: "core" + GigabitEthernet0/0/0/1: + ipv4: "10.1.0.37" + ipv6: "2001:db8::12" + peer: "co-den-p01" + peer_int: "GigabitEthernet0/0/0/2" + type: "core" diff --git a/tests/fixtures/test_validators/inventory/host_vars/az_phx_pe02/base.yml b/tests/fixtures/test_validators/inventory/host_vars/az_phx_pe02/base.yml new file mode 100644 index 0000000..3cfbd09 --- /dev/null +++ b/tests/fixtures/test_validators/inventory/host_vars/az_phx_pe02/base.yml @@ -0,0 +1,22 @@ +--- +hostname: "az-phx-pe02" +pair_rtr: "az-phx-pe01" +upstreams: [] +interfaces: + MgmtEth0/0/CPU0/0: + ipv4: "172.16.1.2" + Loopback0: + ipv4: "192.168.1.2" + ipv6: "2001:db8:1::2" + GigabitEthernet0/0/0/0: + ipv4: "10.1.0.2" + ipv6: "2001:db8::1" + peer: "az-phx-pe01" + peer_int: "GigabitEthernet0/0/0/0" + type: "core" + GigabitEthernet0/0/0/1: + ipv4: "10.1.0.41" + ipv6: "2001:db8::14" + peer: "co-den-p02" + peer_int: "GigabitEthernet0/0/0/2" + type: "access" diff --git a/tests/fixtures/test_validators/inventory/host_vars/co_den_p01/base.yml b/tests/fixtures/test_validators/inventory/host_vars/co_den_p01/base.yml new file mode 100644 index 0000000..d9d2692 --- /dev/null +++ b/tests/fixtures/test_validators/inventory/host_vars/co_den_p01/base.yml @@ -0,0 +1,19 @@ +--- +hostname: "co-den-p01" +pair_rtr: "co-den-p02" +interfaces: + MgmtEth0/0/CPU0/0: + ipv4: "172.16.1.5" + Loopback0: + ipv4: "192.168.1.5" + ipv6: "2001:db8:1::5" + GigabitEthernet0/0/0/2: + ipv4: "10.1.0.38" + ipv6: "2001:db8::13" + peer: "ut-slc-pe01" + peer_int: "GigabitEthernet0/0/0/2" + GigabitEthernet0/0/0/3: + ipv6: "2001:db8::16" + peer: "ut-slc-pe01" + peer_int: "GigabitEthernet0/0/0/1" + type: "core" diff --git a/tests/fixtures/test_validators/inventory/inventory.yml b/tests/fixtures/test_validators/inventory/inventory.yml new file mode 100644 index 0000000..55f9820 --- /dev/null +++ b/tests/fixtures/test_validators/inventory/inventory.yml @@ -0,0 +1,19 @@ +--- +all: + vars: + ansible_network_os: "iosxr" + ansible_user: "cisco" + ansible_password: "cisco" + ansible_connection: "netconf" + ansible_netconf_ssh_config: true + children: + pe_rtrs: + hosts: + az_phx_pe01: + ansible_host: "172.16.1.1" + az_phx_pe02: + ansible_host: "172.16.1.2" + p_rtrs: + hosts: + co_den_p01: + ansible_host: "172.16.1.3" diff --git a/tests/fixtures/test_validators/validators/check_interfaces.py b/tests/fixtures/test_validators/validators/check_interfaces.py new file mode 100644 index 0000000..960a9db --- /dev/null +++ b/tests/fixtures/test_validators/validators/check_interfaces.py @@ -0,0 +1,13 @@ +"""Test validator for JmesPathModelValidation class""" +from schema_enforcer.schemas.validator import JmesPathModelValidation + + +class CheckInterface(JmesPathModelValidation): # pylint: disable=too-few-public-methods + """Test validator for JmesPathModelValidation class""" + + top_level_properties = ["interfaces"] + id = "CheckInterface" # pylint: disable=invalid-name + left = "interfaces.*[@.type=='core'][] | length([?@])" + right = 2 + operator = "gte" + error = "Less than two core interfaces" diff --git a/tests/fixtures/test_validators/validators/check_interfaces_ipv4.py b/tests/fixtures/test_validators/validators/check_interfaces_ipv4.py new file mode 100644 index 0000000..37348b6 --- /dev/null +++ b/tests/fixtures/test_validators/validators/check_interfaces_ipv4.py @@ -0,0 +1,14 @@ +"""Test validator for JmesPathModelValidation class""" +import jmespath +from 
schema_enforcer.schemas.validator import JmesPathModelValidation + + +class CheckInterfaceIPv4(JmesPathModelValidation): # pylint: disable=too-few-public-methods + """Test validator for JmesPathModelValidation class""" + + top_level_properties = ["interfaces"] + id = "CheckInterfaceIPv4" # pylint: disable=invalid-name + left = "interfaces.*[@.type=='core'][] | length([?@])" + right = jmespath.compile("interfaces.* | length([?@.type=='core'][].ipv4)") + operator = "eq" + error = "All core interfaces do not have IPv4 addresses" diff --git a/tests/fixtures/test_validators/validators/check_peers.py b/tests/fixtures/test_validators/validators/check_peers.py new file mode 100644 index 0000000..ccea521 --- /dev/null +++ b/tests/fixtures/test_validators/validators/check_peers.py @@ -0,0 +1,42 @@ +"""Test validator for ModelValidation class""" +from schema_enforcer.schemas.validator import BaseValidation + + +def ansible_hostname(hostname: str): + """Convert hostname to ansible format""" + return hostname.replace("-", "_") + + +def normal_hostname(hostname: str): + """Convert ansible hostname to normal format""" + return hostname.replace("_", "-") + + +class CheckPeers(BaseValidation): # pylint: disable=too-few-public-methods + """ + Validate that peer and peer_int are defined properly on both sides of a connection + + Requires full Ansible host_vars as data which is currently unsupported in schema-enforcer + """ + + id = "CheckPeers" + + def validate(self, data: dict, strict: bool): + for host in data: + for interface, int_cfg in data[host]["interfaces"].items(): + if "peer" not in int_cfg: + continue + peer = int_cfg["peer"] + if "peer_int" not in int_cfg: + self.add_validation_error("Peer interface is not defined") + continue + peer_int = int_cfg["peer_int"] + peer = ansible_hostname(peer) + if peer not in data: + continue + peer_match = data[peer]["interfaces"][peer_int]["peer"] == normal_hostname(host) + peer_int_match = data[peer]["interfaces"][peer_int]["peer_int"] == interface + if peer_match and peer_int_match: + self.add_validation_pass() + else: + self.add_validation_error("Peer information does not match.") diff --git a/tests/test_instances_instance_file.py b/tests/test_instances_instance_file.py index 1e1c07b..4d5ff73 100644 --- a/tests/test_instances_instance_file.py +++ b/tests/test_instances_instance_file.py @@ -6,7 +6,7 @@ import pytest from schema_enforcer.schemas.manager import SchemaManager -from schema_enforcer.instances.file import InstanceFile +from schema_enforcer.instances.file import InstanceFile, InstanceFileManager from schema_enforcer.validation import ValidationResult from schema_enforcer.config import Settings @@ -14,8 +14,6 @@ CONFIG_DATA = { "main_directory": os.path.join(FIXTURES_DIR, "schema"), - # "definitions_directory": - # "schema_directory": "data_file_search_directories": [os.path.join(FIXTURES_DIR, "hostvars")], "schema_mapping": {"dns.yml": ["schemas/dns_servers"]}, } @@ -40,7 +38,7 @@ def if_w_matches(): if_instance = InstanceFile( root=os.path.join(FIXTURES_DIR, "hostvars", "eng-london-rt1"), filename="dns.yaml", - matches=["schemas/dns_servers"], + matches={"schemas/dns_servers"}, ) return if_instance @@ -70,6 +68,13 @@ def schema_manager(): return schema_manager +@pytest.fixture +def ifm(): + """Instance of InstanceFileManager.""" + ifm = InstanceFileManager(config=Settings(**CONFIG_DATA)) + return ifm + + def test_init(if_wo_matches, if_w_matches, if_w_extended_matches): """ Tests initialization of InstanceFile object @@ -79,17 +84,21 @@ def 
test_init(if_wo_matches, if_w_matches, if_w_extended_matches): if_wo_matches (InstanceFile): Initialized InstanceFile pytest fixture if_w_extended_matches (InstanceFile): Initizlized InstanceFile pytest fixture """ - assert if_wo_matches.matches == [] + assert if_wo_matches.matches == set() assert not if_wo_matches.data assert if_wo_matches.path == os.path.join(FIXTURES_DIR, "hostvars", "chi-beijing-rt1") assert if_wo_matches.filename == "syslog.yml" - assert if_w_matches.matches == ["schemas/dns_servers"] + assert if_w_matches.matches == { + "schemas/dns_servers", + } assert not if_w_matches.data assert if_w_matches.path == os.path.join(FIXTURES_DIR, "hostvars", "eng-london-rt1") assert if_w_matches.filename == "dns.yaml" - assert if_w_extended_matches.matches == ["schemas/ntp"] + assert if_w_extended_matches.matches == { + "schemas/ntp", + } assert not if_w_extended_matches.data assert if_w_extended_matches.path == os.path.join(FIXTURES_DIR, "hostvars", "eng-london-rt1") assert if_w_extended_matches.filename == "ntp.yaml" @@ -102,10 +111,14 @@ def test_get_content(if_w_matches): Args: if_w_matches (InstanceFile): Initialized InstanceFile pytest fixture """ - content = if_w_matches.get_content() + content = if_w_matches._get_content() # pylint: disable=protected-access assert content["dns_servers"][0]["address"] == "10.6.6.6" assert content["dns_servers"][1]["address"] == "10.7.7.7" + raw_content = if_w_matches._get_content(structured=False) # pylint: disable=protected-access + with open(os.path.join(FIXTURES_DIR, "hostvars", "eng-london-rt1", "dns.yaml"), "r") as fhd: + assert raw_content == fhd.read() + def test_validate(if_w_matches, schema_manager): """ @@ -127,3 +140,12 @@ def test_validate(if_w_matches, schema_manager): assert isinstance(strict_errs[0], ValidationResult) assert strict_errs[0].result == "FAIL" assert strict_errs[0].message == "Additional properties are not allowed ('fun_extr_attribute' was unexpected)" + + +def test_add_matches_by_property_automap(if_wo_matches, schema_manager): + """Tests add_matches_by_property_automap method of InstanceFile class.""" + assert not if_wo_matches.matches + assert if_wo_matches.top_level_properties == {"syslog_servers"} + assert if_wo_matches._top_level_properties == {"syslog_servers"} # pylint: disable=protected-access + if_wo_matches.add_matches_by_property_automap(schema_manager) + assert if_wo_matches.matches == set(["schemas/syslog_servers"]) diff --git a/tests/test_jsonschema.py b/tests/test_jsonschema.py index 60d5f30..4c9ce39 100644 --- a/tests/test_jsonschema.py +++ b/tests/test_jsonschema.py @@ -1,7 +1,6 @@ # pylint: disable=redefined-outer-name """Tests to validate functions defined in jsonschema.py""" import os - import pytest from schema_enforcer.schemas.jsonschema import JsonSchema @@ -71,28 +70,43 @@ def test_validate(schema_instance, valid_instance_data, invalid_instance_data, s Args: schema_instance (JsonSchema): Instance of JsonSchema class """ - validation_results = list(schema_instance.validate(data=valid_instance_data)) + schema_instance.validate(data=valid_instance_data) + validation_results = schema_instance.get_results() assert len(validation_results) == 1 assert validation_results[0].schema_id == LOADED_SCHEMA_DATA.get("$id") assert validation_results[0].result == RESULT_PASS assert validation_results[0].message is None + schema_instance.clear_results() - validation_results = list(schema_instance.validate(data=invalid_instance_data)) + schema_instance.validate(data=invalid_instance_data) + validation_results = 
schema_instance.get_results() assert len(validation_results) == 1 assert validation_results[0].schema_id == LOADED_SCHEMA_DATA.get("$id") assert validation_results[0].result == RESULT_FAIL assert validation_results[0].message == "True is not of type 'string'" assert validation_results[0].absolute_path == ["dns_servers", "0", "address"] + schema_instance.clear_results() - validation_results = list(schema_instance.validate(data=strict_invalid_instance_data, strict=False)) + schema_instance.validate(data=strict_invalid_instance_data, strict=False) + validation_results = schema_instance.get_results() assert validation_results[0].result == RESULT_PASS + schema_instance.clear_results() - validation_results = list(schema_instance.validate(data=strict_invalid_instance_data, strict=True)) + schema_instance.validate(data=strict_invalid_instance_data, strict=True) + validation_results = schema_instance.get_results() assert validation_results[0].result == RESULT_FAIL assert ( validation_results[0].message == "Additional properties are not allowed ('fun_extr_attribute' was unexpected)" ) + schema_instance.clear_results() + + @staticmethod + def test_format_checkers(schema_instance, data_instance, expected_error_message): + """Test format checkers""" + validation_results = list(schema_instance.validate(data=data_instance)) + assert validation_results[0].result == RESULT_FAIL + assert validation_results[0].message == expected_error_message @staticmethod def test_validate_to_dict(schema_instance, valid_instance_data): @@ -117,6 +131,12 @@ def test_get_strict_validator(): @staticmethod def test_check_if_valid(): - pass + schema_data = load_file(os.path.join(FIXTURES_DIR, "schema", "schemas", "invalid.yml")) + schema_instance = JsonSchema( + schema=schema_data, filename="invalid.yml", root=os.path.join(FIXTURES_DIR, "schema", "schemas"), + ) + results = schema_instance.check_if_valid() + for result in results: + assert not result.passed() # def test_get_id(): diff --git a/tests/test_schemas_schema_manager.py b/tests/test_schemas_schema_manager.py index 78378c5..cec6bf1 100644 --- a/tests/test_schemas_schema_manager.py +++ b/tests/test_schemas_schema_manager.py @@ -4,6 +4,7 @@ import pytest from schema_enforcer.schemas.manager import SchemaManager from schema_enforcer.config import Settings +from schema_enforcer.exceptions import InvalidJSONSchema FIXTURE_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "fixtures") @@ -37,3 +38,47 @@ def test_dump(capsys, schema_manager, schema_id, result_file): schema_manager.dump_schema(schema_id) captured = capsys.readouterr() assert captured.out == expected + + +def test_invalid(): + """ Test validates that SchemaManager reports an error when an invalid schema is loaded. """ + config = { + "main_directory": os.path.join(FIXTURE_DIR, "test_manager", "invalid", "schema"), + "data_file_search_directories": [os.path.join(FIXTURE_DIR, "hostvars")], + "schema_mapping": {"dns.yml": ["schemas/dns_servers"]}, + } + with pytest.raises(InvalidJSONSchema) as e: # pylint: disable=invalid-name + schema_manager = SchemaManager(config=Settings(**config)) # noqa pylint: disable=unused-variable + expected_error = """Invalid JSONschema file: invalid.yml - ["'bla bla bla bla' is not of type 'object', 'boolean'", "'integer' is not of type 'object', 'boolean'"]""" + assert expected_error in str(e) + + +def test_generate_invalid(capsys): + """ Test validates that generate_invalid_test_expected generates the correct data. 
""" + config = { + "main_directory": os.path.join(FIXTURE_DIR, "test_manager", "invalid_generate", "schema"), + } + schema_id = "schemas/test" + + schema_manager = SchemaManager(config=Settings(**config)) + schema_manager.generate_invalid_tests_expected(schema_id) + + invalid_dir = os.path.join(config["main_directory"], "tests", "test", "invalid") + invalid_tests = ["invalid_type1", "invalid_type2"] + + for test in invalid_tests: + test_dir = os.path.join(invalid_dir, test) + with open(os.path.join(test_dir, "exp_results.yml")) as exp_file: + expected = exp_file.read() + with open(os.path.join(test_dir, "results.yml")) as gen_file: + generated = gen_file.read() + assert expected == generated + + test_schema = schema_manager.schemas.get(schema_id) + # Clear results as these would not be carried over into subsequent run of --check + test_schema.clear_results() + # Ignore earlier output + capsys.readouterr() + schema_manager.test_schemas() + captured = capsys.readouterr() + assert "ALL SCHEMAS ARE VALID" in captured.out diff --git a/tests/test_schemas_validator.py b/tests/test_schemas_validator.py new file mode 100644 index 0000000..cac4be4 --- /dev/null +++ b/tests/test_schemas_validator.py @@ -0,0 +1,155 @@ +"""Tests for validator plugin support.""" +# pylint: disable=redefined-outer-name +import os +import pytest +from schema_enforcer.ansible_inventory import AnsibleInventory +import schema_enforcer.schemas.validator as v + +FIXTURE_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "fixtures", "test_validators") + + +@pytest.fixture +def inventory(): + """Fixture for Ansible inventory used in tests.""" + inventory_dir = os.path.join(FIXTURE_DIR, "inventory") + + inventory = AnsibleInventory(inventory_dir) + return inventory + + +@pytest.fixture +def host_vars(inventory): + """Fixture for providing Ansible host_vars as a consolidated dict.""" + hosts = inventory.get_hosts_containing() + host_vars = dict() + for host in hosts: + hostname = host.get_vars()["inventory_hostname"] + host_vars[hostname] = inventory.get_host_vars(host) + return host_vars + + +@pytest.fixture(scope="session") +def validators(): + """Test that validator files are loaded and appended to base class validator list.""" + validator_path = os.path.join(FIXTURE_DIR, "validators") + return v.load_validators(validator_path) + + +def test_jmespathvalidation_pass(host_vars, validators): + """ + Validator: "interfaces.*[@.type=='core'][] | length([?@])" gte 2 + Test expected to pass for az_phx_pe01 with two core interfaces: + interfaces: + GigabitEthernet0/0/0/0: + type: "core" + GigabitEthernet0/0/0/1: + type: "core" + """ + validator = validators["CheckInterface"] + validator.validate(host_vars["az_phx_pe01"], False) + result = validator.get_results() + assert result[0].passed() + validator.clear_results() + + +def test_jmespathvalidation_fail(host_vars, validators): + """ + Validator: "interfaces.*[@.type=='core'][] | length([?@])" gte 2 + Test expected to fail for az_phx_pe02 with one core interface: + interfaces: + GigabitEthernet0/0/0/0: + type: "core" + GigabitEthernet0/0/0/1: + type: "access" + """ + validator = validators["CheckInterface"] + validator.validate(host_vars["az_phx_pe02"], False) + result = validator.get_results() + assert not result[0].passed() + validator.clear_results() + + +def test_jmespathvalidation_with_compile_pass(host_vars, validators): + """ + Validator: "interfaces.*[@.type=='core'][] | length([?@])" eq jmespath.compile("interfaces.* | length([?@.type=='core'][].ipv4)") + Test 
expected to pass for az_phx_pe01 where all core interfaces have IPv4 addresses: + GigabitEthernet0/0/0/0: + ipv4: "10.1.0.1" + ipv6: "2001:db8::" + peer: "az-phx-pe02" + peer_int: "GigabitEthernet0/0/0/0" + type: "core" + GigabitEthernet0/0/0/1: + ipv4: "10.1.0.37" + ipv6: "2001:db8::12" + peer: "co-den-p01" + peer_int: "GigabitEthernet0/0/0/2" + type: "core" + """ + validator = validators["CheckInterfaceIPv4"] + validator.validate(host_vars["az_phx_pe01"], False) + result = validator.get_results() + assert result[0].passed() + validator.clear_results() + + +def test_jmespathvalidation_with_compile_fail(host_vars, validators): + """ + Validator: "interfaces.*[@.type=='core'][] | length([?@])" eq jmespath.compile("interfaces.* | length([?@.type=='core'][].ipv4)") + Test expected to fail for co_den_p01 where core interface is missing an IPv4 addresses: + GigabitEthernet0/0/0/3: + ipv6: "2001:db8::16" + peer: "ut-slc-pe01" + peer_int: "GigabitEthernet0/0/0/1" + type: "core" + """ + validator = validators["CheckInterfaceIPv4"] + validator.validate(host_vars["co_den_p01"], False) + result = validator.get_results() + assert not result[0].passed() + validator.clear_results() + + +def test_modelvalidation_pass(host_vars, validators): + """ + Validator: Checks that peer and peer_int match between peers + Test expected to pass for az_phx_pe01/az_phx_pe02: + + az_phx_pe01: + GigabitEthernet0/0/0/0: + peer: "az-phx-pe02" + peer_int: "GigabitEthernet0/0/0/0" + + az_phx_pe02: + GigabitEthernet0/0/0/0: + peer: "az-phx-pe01" + peer_int: "GigabitEthernet0/0/0/0" + """ + validator = validators["CheckPeers"] + validator.validate(host_vars, False) + result = validator.get_results() + assert result[0].passed() + assert result[2].passed() + validator.clear_results() + + +def test_modelvalidation_fail(host_vars, validators): + """ + Validator: Checks that peer and peer_int match between peers + + Test expected to fail for az_phx_pe01/co_den_p01: + + az_phx_pe01: + GigabitEthernet0/0/0/1: + peer: "co-den-p01" + peer_int: "GigabitEthernet0/0/0/2" + + co_den_p01: + GigabitEthernet0/0/0/2: + peer: ut-slc-pe01 + peer_int: GigabitEthernet0/0/0/2 + """ + validator = validators["CheckPeers"] + validator.validate(host_vars, False) + result = validator.get_results() + assert not result[1].passed()