diff --git a/webinars/panorama-ssot/.gitignore b/webinars/panorama-ssot/.gitignore new file mode 100644 index 0000000..cbb9133 --- /dev/null +++ b/webinars/panorama-ssot/.gitignore @@ -0,0 +1,308 @@ +# Ansible Retry Files +*.retry + +# Swap files +*.swp + +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +pip-wheel-metadata/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +.python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# PEP 582; used by e.g. 
github.com/David-OConnor/pyflow +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# Editor +.vscode/ + +### macOS ### +# General +.DS_Store +.AppleDouble +.LSOverride + +# Thumbnails +._* + +# Files that might appear in the root of a volume +.DocumentRevisions-V100 +.fseventsd +.Spotlight-V100 +.TemporaryItems +.Trashes +.VolumeIcon.icns +.com.apple.timemachine.donotpresent + +# Directories potentially created on remote AFP share +.AppleDB +.AppleDesktop +Network Trash Folder +Temporary Items +.apdisk + +### Windows ### +# Windows thumbnail cache files +Thumbs.db +Thumbs.db:encryptable +ehthumbs.db +ehthumbs_vista.db + +# Dump file +*.stackdump + +# Folder config file +[Dd]esktop.ini + +# Recycle Bin used on file shares +$RECYCLE.BIN/ + +# Windows Installer files +*.cab +*.msi +*.msix +*.msm +*.msp + +# Windows shortcuts +*.lnk + +### PyCharm ### +# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider +# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839 + +# User-specific stuff +.idea/**/workspace.xml +.idea/**/tasks.xml +.idea/**/usage.statistics.xml +.idea/**/dictionaries +.idea/**/shelf + +# Generated files +.idea/**/contentModel.xml + +# Sensitive or high-churn files +.idea/**/dataSources/ +.idea/**/dataSources.ids +.idea/**/dataSources.local.xml +.idea/**/sqlDataSources.xml +.idea/**/dynamic.xml +.idea/**/uiDesigner.xml +.idea/**/dbnavigator.xml + +# Gradle +.idea/**/gradle.xml +.idea/**/libraries + +# Gradle and Maven with auto-import +# When using Gradle or Maven with auto-import, you should exclude module files, +# since they will 
be recreated, and may cause churn. Uncomment if using +# auto-import. +# .idea/artifacts +# .idea/compiler.xml +# .idea/jarRepositories.xml +# .idea/modules.xml +# .idea/*.iml +# .idea/modules +# *.iml +# *.ipr + +# CMake +cmake-build-*/ + +# Mongo Explorer plugin +.idea/**/mongoSettings.xml + +# File-based project format +*.iws + +# IntelliJ +out/ + +# mpeltonen/sbt-idea plugin +.idea_modules/ + +# JIRA plugin +atlassian-ide-plugin.xml + +# Cursive Clojure plugin +.idea/replstate.xml + +# Crashlytics plugin (for Android Studio and IntelliJ) +com_crashlytics_export_strings.xml +crashlytics.properties +crashlytics-build.properties +fabric.properties + +# Editor-based Rest Client +.idea/httpRequests + +# Android studio 3.1+ serialized cache file +.idea/caches/build_file_checksums.ser + +### PyCharm Patch ### +# Comment Reason: https://github.com/joeblau/gitignore.io/issues/186#issuecomment-215987721 + +# *.iml +# modules.xml +# .idea/misc.xml +# *.ipr + +# Sonarlint plugin +# https://plugins.jetbrains.com/plugin/7973-sonarlint +.idea/**/sonarlint/ + +# SonarQube Plugin +# https://plugins.jetbrains.com/plugin/7238-sonarqube-community-plugin +.idea/**/sonarIssues.xml + +# Markdown Navigator plugin +# https://plugins.jetbrains.com/plugin/7896-markdown-navigator-enhanced +.idea/**/markdown-navigator.xml +.idea/**/markdown-navigator-enh.xml +.idea/**/markdown-navigator/ + +# Cache file creation bug +# See https://youtrack.jetbrains.com/issue/JBR-2257 +.idea/$CACHE_FILE$ + +# CodeStream plugin +# https://plugins.jetbrains.com/plugin/12206-codestream +.idea/codestream.xml + +### vscode ### +.vscode/* +*.code-workspace + +# Rando +creds.env +development/*.txt + +# Invoke overrides +invoke.yml + +# Docs +docs/README.md +docs/CHANGELOG.md +public diff --git a/webinars/panorama-ssot/FAQ.md b/webinars/panorama-ssot/FAQ.md new file mode 100644 index 0000000..318b08d --- /dev/null +++ b/webinars/panorama-ssot/FAQ.md @@ -0,0 +1 @@ +# Frequently Asked Questions diff --git 
a/webinars/panorama-ssot/GETTING_STARTED.md b/webinars/panorama-ssot/GETTING_STARTED.md new file mode 100644 index 0000000..e444cdb --- /dev/null +++ b/webinars/panorama-ssot/GETTING_STARTED.md @@ -0,0 +1,349 @@ +# Decide On Development Environment + +- [Decide On Development Environment](#decide-on-development-environment) + - [Introduction](#introduction) + - [Poetry](#poetry) + - [Full Docker Development Environment](#full-docker-development-environment) + - [Invoke](#invoke) + - [Invoke - Building the Docker Image](#invoke---building-the-docker-image) + - [Invoke - Starting the Development Environment](#invoke---starting-the-development-environment) + - [Invoke - Creating a Superuser](#invoke---creating-a-superuser) + - [Invoke - Stopping the Development Environment](#invoke---stopping-the-development-environment) + - [Real-Time Updates? How Cool!](#real-time-updates-how-cool) + - [Docker Magic](#docker-magic) + - [Docker Logs](#docker-logs) + - [To Rebuild or Not to Rebuild](#to-rebuild-or-not-to-rebuild) + - [Updating Environment Variables](#updating-environment-variables) + - [Installing Additional Python Packages](#installing-additional-python-packages) + - [Installing Additional Nautobot Plugins](#installing-additional-nautobot-plugins) + - [Updating Python Version](#updating-python-version) + - [Updating Nautobot Version](#updating-nautobot-version) + - [Local Development Environment](#local-development-environment) + - [Other Miscellaneous Commands To Know](#other-miscellaneous-commands-to-know) + - [Python Shell](#python-shell) + - [Tests](#tests) + +## Introduction + +The cookie provides the ability to develop and manage the Nautobot server locally (with supporting services being *Dockerized*) or using only Docker containers to manage Nautobot. The main difference between the two environments is the ability to debug and use **pdb** when developing locally. 
Debugging with **pdb** within the Docker container is more complicated, but can still be accomplished by either exec'ing into the container or attaching your IDE to the container and running the Nautobot service manually within the container. + +The upside to having the Nautobot service handled by Docker rather than locally is that you do not have to manage the Nautobot server and the [Docker logs](#docker-logs) provide the majority information you will need to help troubleshoot while getting started quickly and not requiring you to perform several manual steps to get started and remember to have the Nautobot server running or having it run in a separate terminal while you develop. Ultimately, the decision is yours as to how you want to develop, but it was agreed it would be a good idea to provide pros and cons for each development environment. + +> The local environment still uses Docker containers for the supporting services (Postgres, Redis, and RQ Worker), but the Nautobot server is handled locally by you, the developer. + +Follow the directions below for the specific development environment that you choose. + +## Poetry + +Poetry is used in lieu of the "virtualenv" commands and is used for either environment. The virtual environment will provide most of the Python packages required to manage the development environment such as **Invoke**, but see the [Local Development Environment](#local-development-environment) section to see how to install Nautobot if you're going to be developing locally. To get started, run the following commands: + +```bash +➜ poetry install +➜ poetry shell +``` + +The first command creates the virtual environment through Poetry and installs all relevant dependencies, as outlined in the `pyproject.toml` file. + +The second command puts your shell session into the virtual environment, so all commands ran going forward are from within the virtual environment. 
(This is similar to running the `source venv/bin/activate` command with virtualenvs). + +## Full Docker Development Environment + +### Invoke + +The beauty of **Invoke** is that the Cookiecutter template provides several simple CLI commands to get developing fast. You'll use a few `invoke` commands to get your environment up and running. + +#### Invoke - Building the Docker Image + +The first thing you need to do is build the necessary Docker image for Nautobot that installs the specific **nautobot_ver**. The image is used for Nautobot and the RQ worker service used by Docker Compose. + +```bash +➜ invoke build +... +#14 exporting to image +#14 sha256:e8c613e07b0b7ff33893b694f7759a10d42e180f2b4dc349fb57dc6b71dcab00 +#14 exporting layers +#14 exporting layers 1.2s done +#14 writing image sha256:2d524bc1665327faa0d34001b0a9d2ccf450612bf8feeb969312e96a2d3e3503 done +#14 naming to docker.io/nautobot-ssot-panorama/nautobot:latest-py3.7 done +``` + +### Invoke - Starting the Development Environment + +Next, you need to start up your Docker containers. + +```bash +➜ invoke start +Starting Nautobot in detached mode... +Running docker-compose command "up --detach" +Creating network "nautobot_ssot_panorama_default" with the default driver +Creating volume "nautobot_ssot_panorama_postgres_data" with default driver +Creating nautobot_ssot_panorama_redis_1 ... +Creating nautobot_ssot_panorama_docs_1 ... +Creating nautobot_ssot_panorama_postgres_1 ... +Creating nautobot_ssot_panorama_postgres_1 ... done +Creating nautobot_ssot_panorama_redis_1 ... done +Creating nautobot_ssot_panorama_nautobot_1 ... +Creating nautobot_ssot_panorama_docs_1 ... done +Creating nautobot_ssot_panorama_nautobot_1 ... done +Creating nautobot_ssot_panorama_worker_1 ... +Creating nautobot_ssot_panorama_worker_1 ... done +Docker Compose is now in the Docker CLI, try `docker compose up` +``` + +This will start all of the Docker containers used for hosting Nautobot. 
Once the containers are up, you should be able to open up a web browser, and view the homepage at [http://localhost:8080](http://localhost:8080). + +> NOTE: Sometimes the containers take a minute to fully spin up. If the page doesn't load right away, wait a minute and try again. + +```bash +➜ docker ps +****CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES +ee90fbfabd77 nautobot-ssot-panorama/nautobot:latest-py3.7 "nautobot-server rqw…" 16 seconds ago Up 13 seconds nautobot_ssot_panorama_worker_1 +b8adb781d013 nautobot-ssot-panorama/nautobot:latest-py3.7 "/docker-entrypoint.…" 20 seconds ago Up 15 seconds 0.0.0.0:8080->8080/tcp, :::8080->8080/tcp nautobot_ssot_panorama_nautobot_1 +d64ebd60675d nautobot-ssot-panorama/nautobot:latest-py3.7 "mkdocs serve -v -a …" 25 seconds ago Up 18 seconds 0.0.0.0:8001->8080/tcp, :::8001->8080/tcp nautobot_ssot_panorama_docs_1 +e72d63129b36 postgres:13-alpine "docker-entrypoint.s…" 25 seconds ago Up 19 seconds 0.0.0.0:5432->5432/tcp, :::5432->5432/tcp nautobot_ssot_panorama_postgres_1 +96c6ff66997c redis:6-alpine "docker-entrypoint.s…" 25 seconds ago Up 21 seconds 0.0.0.0:6379->6379/tcp, :::6379->6379/tcp nautobot_ssot_panorama_redis_1 +``` + +You should see the following containers running after running `invoke start` at this time of writing. + +### Invoke - Creating a Superuser + +The Nautobot development image will automatically provision a super user when specifying the following variables within `creds.env` which is the default when copying `creds.example.env` to `creds.env`. + +- **NAUTOBOT_CREATE_SUPERUSER=true** +- **NAUTOBOT_SUPERUSER_API_TOKEN=0123456789abcdef0123456789abcdef01234567** +- **NAUTOBOT_SUPERUSER_PASSWORD=admin** + +> NOTE: The default username is **admin**, but can be overridden by specifying **NAUTOBOT_SUPERUSER_USERNAME**. + +If you need to create additional superusers, run the follow commands. 
+ +```bash +➜ invoke createsuperuser +Running docker-compose command "ps --services --filter status=running" +Running docker-compose command "exec nautobot nautobot-server createsuperuser --username admin" +Error: That username is already taken. +Username: ntc +Email address: ntc@networktocode.com +Password: +Password (again): +Superuser created successfully. +``` + +### Invoke - Stopping the Development Environment + +The last command to know for now is `invoke stop`. + +```bash +➜ invoke stop +Stopping Nautobot... +Running docker-compose command "down" +Stopping nautobot_ssot_panorama_worker_1 ... +Stopping nautobot_ssot_panorama_nautobot_1 ... +Stopping nautobot_ssot_panorama_docs_1 ... +Stopping nautobot_ssot_panorama_redis_1 ... +Stopping nautobot_ssot_panorama_postgres_1 ... +Stopping nautobot_ssot_panorama_worker_1 ... done +Stopping nautobot_ssot_panorama_nautobot_1 ... done +Stopping nautobot_ssot_panorama_postgres_1 ... done +Stopping nautobot_ssot_panorama_redis_1 ... done +Stopping nautobot_ssot_panorama_docs_1 ... done +Removing nautobot_ssot_panorama_worker_1 ... +Removing nautobot_ssot_panorama_nautobot_1 ... +Removing nautobot_ssot_panorama_docs_1 ... +Removing nautobot_ssot_panorama_redis_1 ... +Removing nautobot_ssot_panorama_postgres_1 ... +Removing nautobot_ssot_panorama_postgres_1 ... done +Removing nautobot_ssot_panorama_docs_1 ... done +Removing nautobot_ssot_panorama_worker_1 ... done +Removing nautobot_ssot_panorama_redis_1 ... done +Removing nautobot_ssot_panorama_nautobot_1 ... done +Removing network nautobot_ssot_panorama_default +``` + +This will safely shut down all of your running Docker containers for this project. When you are ready to spin containers back up, it is as simple as running `invoke start` again like in [**Invoke - Starting the Development Environment**](#invoke---starting-the-development-environment). 
+ +> NOTE: If you're wanting to reset the database and configuration settings, you can use the `invoke destroy` command, but it will result in data loss so make sure that is what you want to do. + +### Real-Time Updates? How Cool! + +Your environment should now be fully setup, all necessary Docker containers are created and running, and you're logged into Nautobot in your web browser. Now what? + +Now you can start developing your plugin in the folder generated for you by Cookiecutter. + +## Docker Magic + +The magic here is the root directory is mounted inside your Docker containers when built and ran, so **any** changes made to the files in here are directly updated to the Nautobot plugin code running in Docker. This means that as you modify the code in your `nautobot-plugin` folder (or whatever you named your plugin when generating it via Cookiecutter), the changes will be instantly updated in Nautobot. + +> NOTE: There are a few exceptions to this, as outlined in the section [To Rebuild or Not To Rebuild](#to-rebuild-or-not-to-rebuild). + +The backend Django process is setup to automatically reload itself (it only takes a couple of seconds) every time a file is updated (saved). So for example, if you were to update one of the files like `tables.py`, then save it, the changes will be visible right away in the web browser! + +> NOTE: You may get connection refused while Django reloads, but it should be refreshed fairly quickly. + +### Docker Logs + +When trying to debug an issue, one helpful thing you can look at are the logs within the Docker containers. + +```bash +➜ docker logs -f +``` + +> NOTE: The `-f` tag will keep the logs open, and output them in realtime as they are generated. + +So for example, our plugin is named `nautobot-ssot-panorama`, the command would most likely be `docker logs nautobot_ssot_panorama_nautobot_1 -f`. You can find the name of all running containers via `docker ps`. 
+ +If you want to view the logs specific to the worker container, simply use the name of that container instead. + +## To Rebuild or Not to Rebuild + +Most of the time, you will not need to rebuild your images. Simply running `invoke start` and `invoke stop` is enough to keep your environment going. + +However, there are a couple of instances when you will want to. + +### Updating Environment Variables + +To add environment variables to your containers, thus allowing Nautobot to use them, you will update/add them in the `development/dev.env` file. However, doing so is considered updating the underlying container shell, instead of Django (which auto restarts itself on changes). + +To get new environment variables to take effect, you will need to stop any running images, rebuild the images, then restart them. This can easily be done with 3 commands: + +```bash +➜ invoke stop +➜ invoke build +➜ invoke start +``` + +Once completed, the new/updated environment variables should now be live. + +### Installing Additional Python Packages + +If you want your plugin to leverage another available Nautobot plugin or another Python package, you can easily add them into your Docker environment. + +```bash +➜ poetry shell +➜ poetry add netutils +``` + +Once the dependencies are resolved, stop the existing containers, rebuild the Docker image, and then start all containers again. + +```bash +➜ invoke stop +➜ invoke build +➜ invoke start +``` + +### Installing Additional Nautobot Plugins + +Let's say for example you want the new plugin you're creating to integrate into Slack. To do this, you will want to integrate into the existing Nautobot ChatOps Plugin. + +```bash +➜ poetry shell +➜ poetry add nautobot-chatops-plugin +``` + +Once you activate the virtual environment via Poetry, you then tell Poetry to install the new plugin. 
+ +Before you continue, you'll need to update the file `development/nautobot_config.py` accordingly with the name of the new plugin under `PLUGINS` and any relevant settings as necessary for the plugin under `PLUGINS_CONFIG`. Since you're modifying the underlying OS (not just Django files), you need to rebuild the image. This is a similar process to updating environment variables, which was explained earlier. + +```bash +➜ invoke stop +➜ invoke build +➜ invoke start +``` + +Once the containers are up and running, you should now see the new plugin installed in your Nautobot instance. + +You can even launch an `ngrok` service locally on your laptop, pointing to port 8080 (such as for chatops development), and it will point traffic directly to your Docker images. How cool! + +### Updating Python Version + +To update the Python version, you can update it within `tasks.py`. + +```python +namespace = Collection("nautobot_ssot_panorama") +namespace.configure( + { + "nautobot_ssot_panorama": { + ... + "python_ver": "3.7", + ... + } + } +) +``` + +Or set the `INVOKE_NAUTOBOT_SSOT_PANORAMA_PYTHON_VER` variable + +### Updating Nautobot Version + +To update the Nautobot version, you can update it within `tasks.py`. + +```python +namespace = Collection("nautobot_ssot_panorama") +namespace.configure( + { + "nautobot_ssot_panorama": { + ... + "nautobot_ver": "1.0.2", + ... + } + } +) +``` + +Or set the `INVOKE_NAUTOBOT_SSOT_PANORAMA_NAUTOBOT_VER` variable + +## Local Development Environment + +Refer back to the [README](./README.md) for developing locally. 
+ +## Other Miscellaneous Commands To Know + +### Python Shell + +To drop into a Django shell for Nautobot (in the Docker container) run: + +```bash +➜ invoke nbshell +``` + +This is the same as running: + +```bash +➜ invoke cli +➜ nautobot-server nbshell +``` + +### iPython Shell Plus + +Django also has a more advanced shell that uses iPython and that will automatically import all the models: + +```bash +➜ invoke shell-plus +``` + +This is the same as running: + +```bash +➜ invoke cli +➜ nautobot-server shell_plus +``` + +### Tests + +To run tests against your code, you can run all of the tests that TravisCI runs against any new PR with: + +```bash +➜ invoke tests +``` + +To run an individual test, you can run any or all of the following: + +```bash +➜ invoke unittest +➜ invoke bandit +➜ invoke black +➜ invoke flake8 +➜ invoke pydocstyle +➜ invoke pylint +``` diff --git a/webinars/panorama-ssot/LICENSE b/webinars/panorama-ssot/LICENSE new file mode 100644 index 0000000..0d375fd --- /dev/null +++ b/webinars/panorama-ssot/LICENSE @@ -0,0 +1,15 @@ +Apache Software License 2.0 + +Copyright (c) 2022, Network to Code, LLC + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + +http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff --git a/webinars/panorama-ssot/README.md b/webinars/panorama-ssot/README.md new file mode 100644 index 0000000..e217d7b --- /dev/null +++ b/webinars/panorama-ssot/README.md @@ -0,0 +1,199 @@ +# Nautobot SSoT Panorama SSoT + +A plugin for [Nautobot](https://github.com/nautobot/nautobot). 
+ +The term SSoT, or Single Source of Truth, refers to the intention of using Nautobot to consolidate data from disparate Systems of Record to create a single resource for all automation needs. This is done by extending the [Nautobot SSoT framework](https://github.com/nautobot/nautobot-plugin-ssot) which uses the DiffSync library. This plug-in is built with the capability in mind to import and export data from your desired System of Record. + +## Installation + +The plugin is available as a Python package in pypi and can be installed with pip + +```shell +pip install nautobot-ssot-panorama +``` + +> The plugin is compatible with Nautobot >= 1.4.0 and requires Nautobot Firewall Models >= 1.1.0. + +To ensure Nautobot SSoT Panorama is automatically re-installed during future upgrades, create a file named `local_requirements.txt` (if not already existing) in the Nautobot root directory (alongside `requirements.txt`) and list the `nautobot-ssot-panorama` package: + +```no-highlight +# echo nautobot-ssot-panorama >> local_requirements.txt +``` + +Once installed, the plugin needs to be enabled in your `nautobot_config.py` + +```python +# In your nautobot_config.py +PLUGINS = ["nautobot_ssot", "nautobot_ssot_panorama", "nautobot_firewall_models"] + +# Plugins configuration settings. These settings are used by various plugins that the user may have installed. +# Each key in the dictionary is the name of an installed plugin and its value is a dictionary of settings. 
+PLUGINS_CONFIG = { + "nautobot_ssot": { + "hide_example_jobs": True, + }, + "nautobot_ssot_panorama": { + "panorama_url": os.getenv("NAUTOBOT_PANORAMA_URL"), + "panorama_user": os.getenv("NAUTOBOT_PANORAMA_USER"), + "panorama_pwd": os.getenv("NAUTOBOT_PANORAMA_PWD"), + }, +} +``` + +The plugin behavior can be controlled with the following list of settings + +- TODO + +Databases supported: + +- Postgres +- Mysql + +## Usage + +### API + +TODO + +## Contributing + +Pull requests are welcomed and automatically built and tested against multiple version of Python and multiple version of Nautobot through TravisCI. + +The project is packaged with a light development environment based on `docker-compose` to help with the local development of the project and to run the tests within TravisCI. + +The project is following Network to Code software development guideline and is leveraging: + +- Black, Pylint, Bandit and pydocstyle for Python linting and formatting. +- Django unit test to ensure the plugin is working properly. + +### Development Environment + +The development environment can be used in 2 ways. First, with a local poetry environment if you wish to develop outside of Docker with the caveat of using external services provided by Docker for PostgresQL and Redis. Second, all services are spun up using Docker and a local mount so you can develop locally, but Nautobot is spun up within the Docker container. + +Below is a quick start guide if you're already familiar with the development environment provided, but if you're not familiar, please read the [Getting Started Guide](GETTING_STARTED.md). + +#### Invoke + +The [PyInvoke](http://www.pyinvoke.org/) library is used to provide some helper commands based on the environment. 
There are a few configuration parameters which can be passed to PyInvoke to override the default configuration: + +- `nautobot_ver`: the version of Nautobot to use as a base for any built docker containers (default: latest) +- `project_name`: the default docker compose project name (default: nautobot_ssot_panorama) +- `python_ver`: the version of Python to use as a base for any built docker containers (default: 3.7) +- `local`: a boolean flag indicating if invoke tasks should be run on the host or inside the docker containers (default: False, commands will be run in docker containers) +- `compose_dir`: the full path to a directory containing the project compose files +- `compose_files`: a list of compose files applied in order (see [Multiple Compose files](https://docs.docker.com/compose/extends/#multiple-compose-files) for more information) + +Using **PyInvoke** these configuration options can be overridden using [several methods](http://docs.pyinvoke.org/en/stable/concepts/configuration.html). Perhaps the simplest is simply setting an environment variable `INVOKE_NAUTOBOT_SSOT_PANORAMA_VARIABLE_NAME` where `VARIABLE_NAME` is the variable you are trying to override. The only exception is `compose_files`; because it is a list, it must be overridden in a yaml file. There is an example `invoke.yml` (`invoke.example.yml`) in this directory which can be used as a starting point. + +#### Local Poetry Development Environment + +1. Copy `development/creds.example.env` to `development/creds.env` (This file will be ignored by Git and Docker) +2. Uncomment the `NAUTOBOT_DB_HOST`, `NAUTOBOT_REDIS_HOST`, and `NAUTOBOT_CONFIG` variables in `development/creds.env` +3. Create an `invoke.yml` file with the following contents at the root of the repo (you can also `cp invoke.example.yml invoke.yml` and edit as necessary): + +```yaml +--- +nautobot_ssot_panorama: + local: true + compose_files: + - "docker-compose.requirements.yml" +``` + +4. 
Run the following commands: + +```shell +poetry shell +poetry install --extras nautobot +export $(cat development/dev.env | xargs) +export $(cat development/creds.env | xargs) +invoke build +invoke start && sleep 5 +nautobot-server migrate +``` + +> If you want to develop on the latest develop branch of Nautobot, run the following command: `poetry add --optional git+https://github.com/nautobot/nautobot@develop`. After the `@` symbol must match either a branch or a tag. + +5. You can now run nautobot-server commands as you would from the [Nautobot documentation](https://nautobot.readthedocs.io/en/latest/) for example to start the development server: + +```shell +nautobot-server runserver 0.0.0.0:8080 --insecure +``` + +Nautobot server can now be accessed at [http://localhost:8080](http://localhost:8080). + +It is typically recommended to launch the Nautobot **runserver** command in a separate shell so you can keep developing and manage the webserver separately. + +#### Docker Development Environment + +This project is managed by [Python Poetry](https://python-poetry.org/) and has a few requirements to setup your development environment: + +1. Install Poetry, see the [Poetry Documentation](https://python-poetry.org/docs/#installation) for your operating system. +2. Install Docker, see the [Docker documentation](https://docs.docker.com/get-docker/) for your operating system. + +Once you have Poetry and Docker installed you can run the following commands to install all other development dependencies in an isolated python virtual environment: + +```shell +poetry shell +poetry install +invoke build +invoke start +``` + +Nautobot server can now be accessed at [http://localhost:8080](http://localhost:8080). + +To either stop or destroy the development environment use the following options. + +- **invoke stop** - Stop the containers, but keep all underlying systems intact +- **invoke destroy** - Stop and remove all containers, volumes, etc. 
(This results in data loss due to the volume being deleted) + +### CLI Helper Commands + +The project is coming with a CLI helper based on [invoke](http://www.pyinvoke.org/) to help setup the development environment. The commands are listed below in 3 categories `dev environment`, `utility` and `testing`. + +Each command can be executed with `invoke `. Environment variables `INVOKE_NAUTOBOT_SSOT_PANORAMA_PYTHON_VER` and `INVOKE_NAUTOBOT_SSOT_PANORAMA_NAUTOBOT_VER` may be specified to override the default versions. Each command also has its own help `invoke --help` + +#### Docker dev environment + +```no-highlight + build Build all docker images. + debug Start Nautobot and its dependencies in debug mode. + destroy Destroy all containers and volumes. + restart Restart Nautobot and its dependencies. + start Start Nautobot and its dependencies in detached mode. + stop Stop Nautobot and its dependencies. +``` + +#### Utility + +```no-highlight + cli Launch a bash shell inside the running Nautobot container. + create-user Create a new user in django (default: admin), will prompt for password. + makemigrations Run Make Migration in Django. + nbshell Launch a nbshell session. + shell-plus Launch a shell_plus session, which uses iPython and automatically imports all models. +``` + +#### Testing + +```no-highlight + bandit Run bandit to validate basic static code security analysis. + black Run black to check that Python files adhere to its style standards. + flake8 This will run flake8 for the specified name and Python version. + pydocstyle Run pydocstyle to validate docstring formatting adheres to NTC defined standards. + pylint Run pylint code analysis. + tests Run all tests for this plugin. + unittest Run Django unit tests for the plugin. +``` + +### Project Documentation + +Project documentation is generated by [mkdocs](https://www.mkdocs.org/) from the documentation located in the docs folder. 
You can configure [readthedocs.io](https://readthedocs.io/) to point at this folder in your repo. A container hosting the docs will be started using the invoke commands on [http://localhost:8001](http://localhost:8001), as changes are saved the docs will be automatically reloaded. + +## Questions + +For any questions or comments, please check the [FAQ](FAQ.md) first and feel free to swing by the [Network to Code slack channel](https://networktocode.slack.com/) (channel #networktocode). +Sign up [here](http://slack.networktocode.com/) + +## Screenshots + +TODO diff --git a/webinars/panorama-ssot/development/Dockerfile b/webinars/panorama-ssot/development/Dockerfile new file mode 100644 index 0000000..0fbc5f7 --- /dev/null +++ b/webinars/panorama-ssot/development/Dockerfile @@ -0,0 +1,41 @@ +ARG NAUTOBOT_VER="1.4.0" +ARG PYTHON_VER=3.8 +FROM ghcr.io/nautobot/nautobot-dev:${NAUTOBOT_VER}-py${PYTHON_VER} + +ENV prometheus_multiproc_dir=/prom_cache + +ARG NAUTOBOT_ROOT=/opt/nautobot + +ENV NAUTOBOT_ROOT ${NAUTOBOT_ROOT} + +WORKDIR $NAUTOBOT_ROOT + +# Configure poetry +RUN poetry config virtualenvs.create false \ + && poetry config installer.parallel false + +# ------------------------------------------------------------------------------------- +# Install Nautobot Plugin +# ------------------------------------------------------------------------------------- +WORKDIR /tmp/install + +# Copy in only pyproject.toml/poetry.lock to help with caching this layer if no updates to dependencies +COPY poetry.lock pyproject.toml /tmp/install/ + +# Add the requested Nautobot version to pyproject +# to install the correct version based on the NAUTOBOT_VER argument +# Otherwise Poetry will override the version in this container +# with the one in the poetry.lock +RUN poetry add nautobot=${NAUTOBOT_VER} + +# --no-root declares not to install the project package since we're wanting to take advantage of caching dependency installation +# and the project is copied in and installed after 
this step +RUN poetry install --no-interaction --no-ansi --no-root + +# Copy in the rest of the source code and install local Nautobot plugin +WORKDIR /source +COPY . /source +RUN cp /tmp/install/* /source/ +RUN poetry install --no-interaction --no-ansi + +COPY development/nautobot_config.py ${NAUTOBOT_ROOT}/nautobot_config.py diff --git a/webinars/panorama-ssot/development/creds.example.env b/webinars/panorama-ssot/development/creds.example.env new file mode 100644 index 0000000..e8e3759 --- /dev/null +++ b/webinars/panorama-ssot/development/creds.example.env @@ -0,0 +1,34 @@ +################################################################################ +# CREDS File: Store private information. Copied to creds.env and always ignored +################################################################################ +# Nautobot Configuration Secret Items +NAUTOBOT_CREATE_SUPERUSER=true +NAUTOBOT_DB_PASSWORD=changeme +NAUTOBOT_NAPALM_USERNAME='' +NAUTOBOT_NAPALM_PASSWORD='' +NAUTOBOT_REDIS_PASSWORD=changeme +NAUTOBOT_SECRET_KEY='changeme' +NAUTOBOT_SUPERUSER_NAME=admin +NAUTOBOT_SUPERUSER_EMAIL=admin@example.com +NAUTOBOT_SUPERUSER_PASSWORD=admin +NAUTOBOT_SUPERUSER_API_TOKEN=0123456789abcdef0123456789abcdef01234567 +NAUTOBOT_CACHEOPS_ENABLED=False + +# Panorama +NAUTOBOT_PANORAMA_URL='changeme.foo.com' +NAUTOBOT_PANORAMA_USER='changeme' +NAUTOBOT_PANORAMA_PWD='changeme' +NAUTOBOT_PANORAMA_API_KEY='changeme' + +# Postgres +POSTGRES_PASSWORD=${NAUTOBOT_DB_PASSWORD} +PGPASSWORD=${NAUTOBOT_DB_PASSWORD} + +# MySQL Credentials +MYSQL_ROOT_PASSWORD=${NAUTOBOT_DB_PASSWORD} +MYSQL_PASSWORD=${NAUTOBOT_DB_PASSWORD} + +# Use these to override values in development.env +# NAUTOBOT_DB_HOST=localhost +# NAUTOBOT_REDIS_HOST=localhost +# NAUTOBOT_CONFIG=development/nautobot_config.py diff --git a/webinars/panorama-ssot/development/development.env b/webinars/panorama-ssot/development/development.env new file mode 100644 index 0000000..dccea51 --- /dev/null +++ 
b/webinars/panorama-ssot/development/development.env @@ -0,0 +1,40 @@ +################################################################################ +# DEV File: Store environment information. NOTE: Secrets NOT stored here! +################################################################################ +# Nautobot Configuration Environment Variables +NAUTOBOT_ALLOWED_HOSTS=* +NAUTOBOT_BANNER_TOP="Local" +NAUTOBOT_CHANGELOG_RETENTION=0 + +NAUTOBOT_DEBUG=True +NAUTOBOT_DJANGO_EXTENSIONS_ENABLED=True +NAUTOBOT_DJANGO_TOOLBAR_ENABLED=False +NAUTOBOT_LOG_LEVEL=DEBUG +NAUTOBOT_METRICS_ENABLED=True +NAUTOBOT_NAPALM_TIMEOUT=5 +NAUTOBOT_MAX_PAGE_SIZE=0 + +# Redis Configuration Environment Variables +NAUTOBOT_REDIS_HOST=redis +NAUTOBOT_REDIS_PORT=6379 +# Uncomment NAUTOBOT_REDIS_SSL if using SSL +# NAUTOBOT_REDIS_SSL=True + +# Nautobot DB Connection Environment Variables +NAUTOBOT_DB_NAME=nautobot +NAUTOBOT_DB_USER=nautobot +NAUTOBOT_DB_HOST=db +NAUTOBOT_DB_TIMEOUT=300 + +# Use them to overwrite the defaults in nautobot_config.py +# NAUTOBOT_DB_ENGINE=django.db.backends.postgresql +# NAUTOBOT_DB_PORT=5432 + +# Needed for Postgres should match the values for Nautobot above +POSTGRES_USER=${NAUTOBOT_DB_USER} +POSTGRES_DB=${NAUTOBOT_DB_NAME} + +# Needed for MYSQL should match the values for Nautobot above +MYSQL_USER=${NAUTOBOT_DB_USER} +MYSQL_DATABASE=${NAUTOBOT_DB_NAME} +MYSQL_ROOT_HOST=% diff --git a/webinars/panorama-ssot/development/development_mysql.env b/webinars/panorama-ssot/development/development_mysql.env new file mode 100644 index 0000000..3cbb4bf --- /dev/null +++ b/webinars/panorama-ssot/development/development_mysql.env @@ -0,0 +1,4 @@ +# Custom ENVs for Mysql +# Due to docker image limitations for Mysql, we need "root" user to create more than one database table +NAUTOBOT_DB_USER=root +MYSQL_USER=${NAUTOBOT_DB_USER} diff --git a/webinars/panorama-ssot/development/docker-compose.base.yml b/webinars/panorama-ssot/development/docker-compose.base.yml new file 
mode 100644 index 0000000..a6fd2ae --- /dev/null +++ b/webinars/panorama-ssot/development/docker-compose.base.yml @@ -0,0 +1,49 @@ +--- +x-nautobot-build: &nautobot-build + build: + args: + NAUTOBOT_VER: "${NAUTOBOT_VER}" + PYTHON_VER: "${PYTHON_VER}" + context: "../" + dockerfile: "development/Dockerfile" +x-nautobot-base: &nautobot-base + image: "nautobot-ssot-panorama/nautobot:${NAUTOBOT_VER}-py${PYTHON_VER}" + env_file: + - "development.env" + - "creds.env" + tty: true + +version: "3.8" +services: + nautobot: + depends_on: + redis: + condition: "service_started" + db: + condition: "service_healthy" + <<: *nautobot-build + <<: *nautobot-base + worker: + entrypoint: + - "sh" + - "-c" # this is to evaluate the $NAUTOBOT_LOG_LEVEL from the env + - "watchmedo auto-restart --directory './' --pattern '*.py' --recursive -- nautobot-server celery worker -l $$NAUTOBOT_LOG_LEVEL --events" ## $$ because of docker-compose + depends_on: + - "nautobot" + healthcheck: + interval: "30s" + timeout: "10s" + start_period: "30s" + retries: 3 + test: ["CMD", "bash", "-c", "nautobot-server celery inspect ping --destination celery@$$HOSTNAME"] ## $$ because of docker-compose + <<: *nautobot-base + beat: + entrypoint: + - "sh" + - "-c" # this is to evaluate the $NAUTOBOT_LOG_LEVEL from the env + - "nautobot-server celery beat -l $$NAUTOBOT_LOG_LEVEL" ## $$ because of docker-compose + depends_on: + - "nautobot" + healthcheck: + disable: true + <<: *nautobot-base diff --git a/webinars/panorama-ssot/development/docker-compose.dev.yml b/webinars/panorama-ssot/development/docker-compose.dev.yml new file mode 100644 index 0000000..8499b7b --- /dev/null +++ b/webinars/panorama-ssot/development/docker-compose.dev.yml @@ -0,0 +1,36 @@ +# We can't remove volumes in a compose override, for the test configuration using the final containers +# we don't want the volumes so this is the default override file to add the volumes in the dev case +# any override will need to include these volumes to use 
them. +# see: https://github.com/docker/compose/issues/3729 +--- +version: "3.8" +services: + nautobot: + command: "nautobot-server runserver 0.0.0.0:8080" + ports: + - "8080:8080" + volumes: + - "./nautobot_config.py:/opt/nautobot/nautobot_config.py" + - "../:/source" + docs: + entrypoint: "mkdocs serve -v -a 0.0.0.0:8080" + ports: + - "8001:8080" + volumes: + - "../docs:/source/docs:ro" + - "../mkdocs.yml:/source/mkdocs.yml:ro" + image: "nautobot-ssot-panorama/nautobot:${NAUTOBOT_VER}-py${PYTHON_VER}" + healthcheck: + disable: true + tty: true + worker: + volumes: + - "./nautobot_config.py:/opt/nautobot/nautobot_config.py" + - "../:/source" +# To expose postgres or redis to the host uncomment the following +# postgres: +# ports: +# - "5432:5432" +# redis: +# ports: +# - "6379:6379" diff --git a/webinars/panorama-ssot/development/docker-compose.mysql.yml b/webinars/panorama-ssot/development/docker-compose.mysql.yml new file mode 100644 index 0000000..c7fa6a1 --- /dev/null +++ b/webinars/panorama-ssot/development/docker-compose.mysql.yml @@ -0,0 +1,34 @@ +--- +version: "3.8" + +services: + nautobot: + environment: + - "NAUTOBOT_DB_ENGINE=django.db.backends.mysql" + env_file: + - "development.env" + - "creds.env" + - "development_mysql.env" + worker: + environment: + - "NAUTOBOT_DB_ENGINE=django.db.backends.mysql" + env_file: + - "development.env" + - "creds.env" + - "development_mysql.env" + db: + image: "mysql:8" + command: + - "--default-authentication-plugin=mysql_native_password" + env_file: + - "development.env" + - "creds.env" + - "development_mysql.env" + volumes: + - "mysql_data:/var/lib/mysql" + healthcheck: + test: ["CMD", "mysqladmin", "ping", "-h", "localhost"] + timeout: "20s" + retries: 10 +volumes: + mysql_data: {} diff --git a/webinars/panorama-ssot/development/docker-compose.postgres.yml b/webinars/panorama-ssot/development/docker-compose.postgres.yml new file mode 100644 index 0000000..30ce524 --- /dev/null +++ 
b/webinars/panorama-ssot/development/docker-compose.postgres.yml @@ -0,0 +1,24 @@ +--- +version: "3.8" + +services: + nautobot: + environment: + - "NAUTOBOT_DB_ENGINE=django.db.backends.postgresql" + db: + image: "postgres:13-alpine" + command: postgres -c 'max_connections=250' + env_file: + - "development.env" + - "creds.env" + volumes: + # - "./nautobot.sql:/tmp/nautobot.sql" + - "postgres_data:/var/lib/postgresql/data" + healthcheck: + test: "pg_isready --username=$$POSTGRES_USER --dbname=$$POSTGRES_DB" + interval: "10s" + timeout: "5s" + retries: 10 + +volumes: + postgres_data: {} diff --git a/webinars/panorama-ssot/development/docker-compose.redis.yml b/webinars/panorama-ssot/development/docker-compose.redis.yml new file mode 100644 index 0000000..6da9fa0 --- /dev/null +++ b/webinars/panorama-ssot/development/docker-compose.redis.yml @@ -0,0 +1,12 @@ +--- +version: "3.8" +services: + redis: + image: "redis:6-alpine" + command: + - "sh" + - "-c" # this is to evaluate the $NAUTOBOT_REDIS_PASSWORD from the env + - "redis-server --appendonly yes --requirepass $$NAUTOBOT_REDIS_PASSWORD" + env_file: + - "development.env" + - "creds.env" diff --git a/webinars/panorama-ssot/development/nautobot_config.py b/webinars/panorama-ssot/development/nautobot_config.py new file mode 100644 index 0000000..cfa0ac9 --- /dev/null +++ b/webinars/panorama-ssot/development/nautobot_config.py @@ -0,0 +1,352 @@ +"""Nautobot Configuration.""" # pylint: disable=invalid-envvar-default +import os +import sys + +from nautobot.core.settings import * # noqa F401,F403 pylint: disable=wildcard-import,unused-wildcard-import +from nautobot.core.settings_funcs import is_truthy, parse_redis_connection + +######################### +# # +# Required settings # +# # +######################### + +# This is a list of valid fully-qualified domain names (FQDNs) for the Nautobot server. Nautobot will not permit write +# access to the server via any other hostnames. 
The first FQDN in the list will be treated as the preferred name. +# +# Example: ALLOWED_HOSTS = ['nautobot.example.com', 'nautobot.internal.local'] +ALLOWED_HOSTS = os.getenv("NAUTOBOT_ALLOWED_HOSTS", "").split(" ") + +# Database configuration. See the Django documentation for a complete list of available parameters: +# https://docs.djangoproject.com/en/stable/ref/settings/#databases + +default_db_settings = { + "django.db.backends.postgresql": { + "NAUTOBOT_DB_PORT": "5432", + }, + "django.db.backends.mysql": { + "NAUTOBOT_DB_PORT": "3306", + }, +} + +nautobot_db_engine = os.getenv("NAUTOBOT_DB_ENGINE", "django.db.backends.postgresql") + +DATABASES = { + "default": { + "NAME": os.getenv("NAUTOBOT_DB_NAME", "nautobot"), # Database name + "USER": os.getenv("NAUTOBOT_DB_USER", ""), # Database username + "PASSWORD": os.getenv("NAUTOBOT_DB_PASSWORD", ""), # Database password + "HOST": os.getenv("NAUTOBOT_DB_HOST", "localhost"), # Database server + "PORT": os.getenv( + "NAUTOBOT_DB_PORT", default_db_settings[nautobot_db_engine]["NAUTOBOT_DB_PORT"] + ), # Database port, default to postgres + "CONN_MAX_AGE": int(os.getenv("NAUTOBOT_DB_TIMEOUT", 300)), # Database timeout + "ENGINE": nautobot_db_engine, + # "OPTIONS": {"charset": "utf8mb4"}, # For MySQL unicode emoji support, uncomment this line + } +} + +# Ensure proper Unicode handling for MySQL +if DATABASES["default"]["ENGINE"] == "django.db.backends.mysql": + DATABASES["default"]["OPTIONS"] = {"charset": "utf8mb4"} + +# Nautobot uses RQ for task scheduling. These are the following defaults. +# For detailed configuration see: https://github.com/rq/django-rq#installation +# These defaults utilize the Django `CACHES` setting defined above for django-redis. 
+# See: https://github.com/rq/django-rq#support-for-django-redis-and-django-redis-cache +RQ_QUEUES = { + "default": { + "USE_REDIS_CACHE": "default", + }, + "check_releases": { + "USE_REDIS_CACHE": "default", + }, + "custom_fields": { + "USE_REDIS_CACHE": "default", + }, + "webhooks": { + "USE_REDIS_CACHE": "default", + }, +} + +# Nautobot uses Cacheops for database query caching. These are the following defaults. +# For detailed configuration see: https://github.com/Suor/django-cacheops#setup +CACHEOPS_REDIS = os.getenv("NAUTOBOT_CACHEOPS_REDIS", parse_redis_connection(redis_database=1)) + +# The django-redis cache is used to establish concurrent locks using Redis. The +# django-rq settings will use the same instance/database by default. +CACHES = { + "default": { + "BACKEND": "django_redis.cache.RedisCache", + "LOCATION": parse_redis_connection(redis_database=0), + "TIMEOUT": 300, + "OPTIONS": { + "CLIENT_CLASS": "django_redis.client.DefaultClient", + }, + } +} + +# This key is used for secure generation of random numbers and strings. It must never be exposed outside of this file. +# For optimal security, SECRET_KEY should be at least 50 characters in length and contain a mix of letters, numbers, and +# symbols. Nautobot will not run without this defined. For more information, see +# https://docs.djangoproject.com/en/stable/ref/settings/#std:setting-SECRET_KEY +SECRET_KEY = os.getenv("NAUTOBOT_SECRET_KEY") + + +######################### +# # +# Optional settings # +# # +######################### + +# Specify one or more name and email address tuples representing Nautobot administrators. These people will be notified of +# application errors (assuming correct email settings are provided). 
+ADMINS = [ + # ['John Doe', 'jdoe@example.com'], +] + +# URL schemes that are allowed within links in Nautobot +ALLOWED_URL_SCHEMES = ( + "file", + "ftp", + "ftps", + "http", + "https", + "irc", + "mailto", + "sftp", + "ssh", + "tel", + "telnet", + "tftp", + "vnc", + "xmpp", +) + +# Optionally display a persistent banner at the top and/or bottom of every page. HTML is allowed. To display the same +# content in both banners, define BANNER_TOP and set BANNER_BOTTOM = BANNER_TOP. +BANNER_TOP = os.getenv("NAUTOBOT_BANNER_TOP", "") +BANNER_BOTTOM = os.getenv("NAUTOBOT_BANNER_BOTTOM", "") + +# Text to include on the login page above the login form. HTML is allowed. +BANNER_LOGIN = os.getenv("NAUTOBOT_BANNER_LOGIN", "") + +# Cache timeout in seconds. Cannot be 0. Defaults to 900 (15 minutes). To disable caching, set CACHEOPS_ENABLED to False +CACHEOPS_DEFAULTS = {"timeout": int(os.getenv("NAUTOBOT_CACHEOPS_TIMEOUT", 900))} + +# Set to False to disable caching with cacheops. (Default: True) +CACHEOPS_ENABLED = is_truthy(os.getenv("NAUTOBOT_CACHEOPS_ENABLED", True)) + +# Maximum number of days to retain logged changes. Set to 0 to retain changes indefinitely. (Default: 90) +CHANGELOG_RETENTION = int(os.getenv("NAUTOBOT_CHANGELOG_RETENTION", 90)) + +# If True, all origins will be allowed. Other settings restricting allowed origins will be ignored. +# Defaults to False. Setting this to True can be dangerous, as it allows any website to make +# cross-origin requests to yours. Generally you'll want to restrict the list of allowed origins with +# CORS_ALLOWED_ORIGINS or CORS_ALLOWED_ORIGIN_REGEXES. +CORS_ALLOW_ALL_ORIGINS = is_truthy(os.getenv("NAUTOBOT_CORS_ALLOW_ALL_ORIGINS", False)) + +# A list of origins that are authorized to make cross-site HTTP requests. Defaults to []. +CORS_ALLOWED_ORIGINS = [ + # 'https://hostname.example.com', +] + +# A list of strings representing regexes that match Origins that are authorized to make cross-site +# HTTP requests. Defaults to []. 
+CORS_ALLOWED_ORIGIN_REGEXES = [ + # r'^(https?://)?(\w+\.)?example\.com$', +] + +# FQDNs that are considered trusted origins for secure, cross-domain, requests such as HTTPS POST. +# If running Nautobot under a single domain, you may not need to set this variable; +# if running on multiple domains, you *may* need to set this variable to more or less the same as ALLOWED_HOSTS above. +# https://docs.djangoproject.com/en/stable/ref/settings/#csrf-trusted-origins +CSRF_TRUSTED_ORIGINS = [] + +# Set to True to enable server debugging. WARNING: Debugging introduces a substantial performance penalty and may reveal +# sensitive information about your installation. Only enable debugging while performing testing. Never enable debugging +# on a production system. +DEBUG = is_truthy(os.getenv("NAUTOBOT_DEBUG", False)) + +# Enforcement of unique IP space can be toggled on a per-VRF basis. To enforce unique IP space +# within the global table (all prefixes and IP addresses not assigned to a VRF), set +# ENFORCE_GLOBAL_UNIQUE to True. +ENFORCE_GLOBAL_UNIQUE = is_truthy(os.getenv("NAUTOBOT_ENFORCE_GLOBAL_UNIQUE", False)) + +# Exempt certain models from the enforcement of view permissions. Models listed here will be viewable by all users and +# by anonymous users. List models in the form `.`. Add '*' to this list to exempt all models. +EXEMPT_VIEW_PERMISSIONS = [ + # 'dcim.site', + # 'dcim.region', + # 'ipam.prefix', +] + +# Global 3rd-party authentication settings +EXTERNAL_AUTH_DEFAULT_GROUPS = [] +EXTERNAL_AUTH_DEFAULT_PERMISSIONS = {} + +# If hosting Nautobot in a subdirectory, you must set this value to match the base URL prefix configured in your HTTP server (e.g. `/nautobot/`). When not set, URLs will default to being prefixed by `/`. +FORCE_SCRIPT_NAME = None + +# When set to `True`, users with limited permissions will only be able to see items in the UI they have access too. 
+HIDE_RESTRICTED_UI = is_truthy(os.getenv("NAUTOBOT_HIDE_RESTRICTED_UI", False)) + +# HTTP proxies Nautobot should use when sending outbound HTTP requests (e.g. for webhooks). +# HTTP_PROXIES = { +# 'http': 'http://10.10.1.10:3128', +# 'https': 'http://10.10.1.10:1080', +# } + +# IP addresses recognized as internal to the system. The debugging toolbar will be available only to clients accessing +# Nautobot from an internal IP. +INTERNAL_IPS = ("127.0.0.1", "::1") + +# Enable custom logging. Please see the Django documentation for detailed guidance on configuring custom logs: +# https://docs.djangoproject.com/en/stable/topics/logging/ +LOG_LEVEL = "DEBUG" if DEBUG else "INFO" +LOGGING = { + "version": 1, + "disable_existing_loggers": False, + "formatters": { + "normal": { + "format": "%(asctime)s.%(msecs)03d %(levelname)-7s %(name)s :\n %(message)s", + "datefmt": "%H:%M:%S", + }, + "verbose": { + "format": "%(asctime)s.%(msecs)03d %(levelname)-7s %(name)-20s %(filename)-15s %(funcName)30s() :\n %(message)s", + "datefmt": "%H:%M:%S", + }, + }, + "handlers": { + "normal_console": { + "level": "DEBUG", + "class": "logging.StreamHandler", + "formatter": "normal", + }, + }, + "loggers": { + "django": {"handlers": ["normal_console"], "level": "INFO"}, + "nautobot": { + "handlers": ["normal_console"], + "level": LOG_LEVEL, + }, + "rq.worker": { + "handlers": ["normal_console"], + "level": LOG_LEVEL, + }, + }, +} + +# Setting this to True will display a "maintenance mode" banner at the top of every page. +MAINTENANCE_MODE = is_truthy(os.getenv("NAUTOBOT_MAINTENANCE_MODE", False)) + +# An API consumer can request an arbitrary number of objects =by appending the "limit" parameter to the URL (e.g. +# "?limit=1000"). This setting defines the maximum limit. Setting it to 0 or None will allow an API consumer to request +# all objects by specifying "?limit=0". 
+MAX_PAGE_SIZE = int(os.getenv("NAUTOBOT_MAX_PAGE_SIZE", 1000)) + +# The file path where uploaded media such as image attachments are stored. A trailing slash is not needed. Note that +# the default value of this setting is within the invoking user's home directory +# MEDIA_ROOT = os.path.expanduser('~/.nautobot/media') + +# By default uploaded media is stored on the local filesystem. Using Django-storages is also supported. Provide the +# class path of the storage driver in STORAGE_BACKEND and any configuration options in STORAGE_CONFIG. For example: +# STORAGE_BACKEND = 'storages.backends.s3boto3.S3Boto3Storage' +# STORAGE_CONFIG = { +# 'AWS_ACCESS_KEY_ID': 'Key ID', +# 'AWS_SECRET_ACCESS_KEY': 'Secret', +# 'AWS_STORAGE_BUCKET_NAME': 'nautobot', +# 'AWS_S3_REGION_NAME': 'eu-west-1', +# } + +# Expose Prometheus monitoring metrics at the HTTP endpoint '/metrics' +METRICS_ENABLED = is_truthy(os.getenv("NAUTOBOT_METRICS_ENABLED", False)) + +# Credentials that Nautobot will uses to authenticate to devices when connecting via NAPALM. +NAPALM_USERNAME = os.getenv("NAUTOBOT_NAPALM_USERNAME", "") +NAPALM_PASSWORD = os.getenv("NAUTOBOT_NAPALM_PASSWORD", "") + +# NAPALM timeout (in seconds). (Default: 30) +NAPALM_TIMEOUT = int(os.getenv("NAUTOBOT_NAPALM_TIMEOUT", 30)) + +# NAPALM optional arguments (see https://napalm.readthedocs.io/en/latest/support/#optional-arguments). Arguments must +# be provided as a dictionary. +NAPALM_ARGS = {} + +# Determine how many objects to display per page within a list. (Default: 50) +PAGINATE_COUNT = int(os.getenv("NAUTOBOT_PAGINATE_COUNT", 50)) + +# Enable installed plugins. Add the name of each plugin to the list. +PLUGINS = ["nautobot_ssot", "nautobot_ssot_panorama", "nautobot_firewall_models", "nautobot_golden_config"] + +# Plugins configuration settings. These settings are used by various plugins that the user may have installed. +# Each key in the dictionary is the name of an installed plugin and its value is a dictionary of settings. 
+PLUGINS_CONFIG = { + "nautobot_ssot": { + "hide_example_jobs": True, + }, + "nautobot_ssot_panorama": { + "panorama_url": os.getenv("NAUTOBOT_PANORAMA_URL"), + "panorama_user": os.getenv("NAUTOBOT_PANORAMA_USER"), + "panorama_pwd": os.getenv("NAUTOBOT_PANORAMA_PWD"), + "panorama_api_key": os.getenv("NAUTOBOT_PANORAMA_API_KEY"), + }, +} + +# When determining the primary IP address for a device, IPv6 is preferred over IPv4 by default. Set this to True to +# prefer IPv4 instead. +PREFER_IPV4 = is_truthy(os.getenv("NAUTOBOT_PREFER_IPV4", False)) + +# Rack elevation size defaults, in pixels. For best results, the ratio of width to height should be roughly 10:1. +RACK_ELEVATION_DEFAULT_UNIT_HEIGHT = int(os.getenv("NAUTOBOT_RACK_ELEVATION_DEFAULT_UNIT_HEIGHT", 22)) +RACK_ELEVATION_DEFAULT_UNIT_WIDTH = int(os.getenv("NAUTOBOT_RACK_ELEVATION_DEFAULT_UNIT_WIDTH", 220)) + +# Remote auth backend settings +REMOTE_AUTH_AUTO_CREATE_USER = False +REMOTE_AUTH_HEADER = "HTTP_REMOTE_USER" + +# This determines how often the GitHub API is called to check the latest release of Nautobot. Must be at least 1 hour. +RELEASE_CHECK_TIMEOUT = int(os.getenv("NAUTOBOT_RELEASE_CHECK_TIMEOUT", 24 * 3600)) + +# This repository is used to check whether there is a new release of Nautobot available. Set to None to disable the +# version check or use the URL below to check for release in the official Nautobot repository. +RELEASE_CHECK_URL = os.getenv("NAUTOBOT_RELEASE_CHECK_URL", None) +# RELEASE_CHECK_URL = 'https://api.github.com/repos/nautobot/nautobot/releases' + +# The length of time (in seconds) for which a user will remain logged into the web UI before being prompted to +# re-authenticate. (Default: 1209600 [14 days]) +SESSION_COOKIE_AGE = int(os.getenv("NAUTOBOT_SESSION_COOKIE_AGE", 1209600)) # 2 weeks, in seconds + +# By default, Nautobot will store session data in the database. Alternatively, a file path can be specified here to use +# local file storage instead. 
(This can be useful for enabling authentication on a standby instance with read-only +# database access.) Note that the user as which Nautobot runs must have read and write permissions to this path. +SESSION_FILE_PATH = os.getenv("NAUTOBOT_SESSION_FILE_PATH", None) + +# Configure SSO, for more information see docs/configuration/authentication/sso.md +SOCIAL_AUTH_POSTGRES_JSONFIELD = False + +# Time zone (default: UTC) +TIME_ZONE = os.getenv("NAUTOBOT_TIME_ZONE", "UTC") + +# Date/time formatting. See the following link for supported formats: +# https://docs.djangoproject.com/en/stable/ref/templates/builtins/#date +DATE_FORMAT = os.getenv("NAUTOBOT_DATE_FORMAT", "N j, Y") +SHORT_DATE_FORMAT = os.getenv("NAUTOBOT_SHORT_DATE_FORMAT", "Y-m-d") +TIME_FORMAT = os.getenv("NAUTOBOT_TIME_FORMAT", "g:i a") +SHORT_TIME_FORMAT = os.getenv("NAUTOBOT_SHORT_TIME_FORMAT", "H:i:s") +DATETIME_FORMAT = os.getenv("NAUTOBOT_DATETIME_FORMAT", "N j, Y g:i a") +SHORT_DATETIME_FORMAT = os.getenv("NAUTOBOT_SHORT_DATETIME_FORMAT", "Y-m-d H:i") + +# A list of strings designating all applications that are enabled in this Django installation. Each string should be a dotted Python path to an application configuration class (preferred), or a package containing an application. 
+# https://nautobot.readthedocs.io/en/latest/configuration/optional-settings/#extra-applications +EXTRA_INSTALLED_APPS = [] + +# Django Debug Toolbar +DJANGO_TOOLBAR_ENABLED = is_truthy(os.getenv("NAUTOBOT_DJANGO_TOOLBAR_ENABLED", False)) +TESTING = len(sys.argv) > 1 and sys.argv[1] == "test" +DEBUG_TOOLBAR_CONFIG = {"SHOW_TOOLBAR_CALLBACK": lambda _request: DEBUG and not TESTING} + +if DJANGO_TOOLBAR_ENABLED and "debug_toolbar" not in EXTRA_INSTALLED_APPS: + EXTRA_INSTALLED_APPS.append("debug_toolbar") +if DJANGO_TOOLBAR_ENABLED and "debug_toolbar.middleware.DebugToolbarMiddleware" not in MIDDLEWARE: # noqa F405 + MIDDLEWARE.insert(0, "debug_toolbar.middleware.DebugToolbarMiddleware") # noqa F405 diff --git a/webinars/panorama-ssot/docs/extra.css b/webinars/panorama-ssot/docs/extra.css new file mode 100644 index 0000000..6a95f35 --- /dev/null +++ b/webinars/panorama-ssot/docs/extra.css @@ -0,0 +1,19 @@ +/* Images */ +img { + display: block; + margin-left: auto; + margin-right: auto; +} + +/* Tables */ +table { + margin-bottom: 24px; + width: 100%; +} +th { + background-color: #f0f0f0; + padding: 6px; +} +td { + padding: 6px; +} diff --git a/webinars/panorama-ssot/docs/index.md b/webinars/panorama-ssot/docs/index.md new file mode 100644 index 0000000..85f5695 --- /dev/null +++ b/webinars/panorama-ssot/docs/index.md @@ -0,0 +1,17 @@ +# NautobotSSoTPanorama + +TODO: Write plugin documentation, the outline here is provided as a guide and should be expanded upon. If more detail is required you are encouraged to expand on the table of contents (TOC) in `mkdocs.yml` to add additional pages. 
+ +## Description + +## Installation + +## Configuration + +## Usage + +## API + +## Views + +## Models diff --git a/webinars/panorama-ssot/docs/requirements.txt b/webinars/panorama-ssot/docs/requirements.txt new file mode 100644 index 0000000..9f37075 --- /dev/null +++ b/webinars/panorama-ssot/docs/requirements.txt @@ -0,0 +1 @@ +mkdocs==1.3.0 diff --git a/webinars/panorama-ssot/invoke.example.yml b/webinars/panorama-ssot/invoke.example.yml new file mode 100644 index 0000000..4a00283 --- /dev/null +++ b/webinars/panorama-ssot/invoke.example.yml @@ -0,0 +1,12 @@ +--- +nautobot_ssot_panorama: + project_name: "nautobot-ssot-panorama" + nautobot_ver: "latest" + local: false + python_ver: "3.7" + compose_dir: "development" + compose_files: + - "docker-compose.base.yml" + - "docker-compose.redis.yml" + - "docker-compose.postgres.yml" + - "docker-compose.dev.yml" diff --git a/webinars/panorama-ssot/invoke.mysql.yml b/webinars/panorama-ssot/invoke.mysql.yml new file mode 100644 index 0000000..df19f37 --- /dev/null +++ b/webinars/panorama-ssot/invoke.mysql.yml @@ -0,0 +1,12 @@ +--- +nautobot_ssot_panorama: + project_name: "nautobot-ssot-panorama" + nautobot_ver: "latest" + local: false + python_ver: "3.7" + compose_dir: "development" + compose_files: + - "docker-compose.base.yml" + - "docker-compose.redis.yml" + - "docker-compose.mysql.yml" + - "docker-compose.dev.yml" diff --git a/webinars/panorama-ssot/mkdocs.yml b/webinars/panorama-ssot/mkdocs.yml new file mode 100644 index 0000000..6d6e3ae --- /dev/null +++ b/webinars/panorama-ssot/mkdocs.yml @@ -0,0 +1,23 @@ +--- +dev_addr: "127.0.0.1:8001" +edit_uri: "edit/main/nautobot-plugin-ssot-panorama/docs" +site_name: "NautobotSSoTPanorama Documentation" +site_url: "https://nautobot-plugin-ssot-panorama.readthedocs.io/" +repo_url: "https://github.com/networktocode/nautobot-plugin-ssot-panorama" +python: + install: + - requirements: "docs/requirements.txt" +theme: + name: "readthedocs" + navigation_depth: 4 + hljs_languages: + 
- "django" + - "yaml" +extra_css: + - "extra.css" +markdown_extensions: + - "admonition" + - toc: + permalink: true +nav: + - Introduction: "index.md" diff --git a/webinars/panorama-ssot/nautobot_ssot_panorama/__init__.py b/webinars/panorama-ssot/nautobot_ssot_panorama/__init__.py new file mode 100644 index 0000000..1279f92 --- /dev/null +++ b/webinars/panorama-ssot/nautobot_ssot_panorama/__init__.py @@ -0,0 +1,39 @@ +"""Plugin declaration for nautobot_ssot_panorama.""" +# Metadata is inherited from Nautobot. If not including Nautobot in the environment, this should be added +try: + from importlib import metadata +except ImportError: + # Python version < 3.8 + import importlib_metadata as metadata + +__version__ = metadata.version(__name__) + +from nautobot.core.signals import nautobot_database_ready +from nautobot.extras.plugins import PluginConfig + +from nautobot_ssot_panorama.signals import nautobot_database_ready_callback + + +class NautobotSSoTPanoramaConfig(PluginConfig): + """Plugin configuration for the nautobot_ssot_panorama plugin.""" + + name = "nautobot_ssot_panorama" + verbose_name = "Nautobot SSoT Panorama" + version = __version__ + author = "Network to Code, LLC" + description = "SSoT sync capabilities with Nautobot Firewall Models Plugin and Panorama." 
+ base_url = "ssot-panorama" + required_settings = [] + min_version = "1.4.0" + max_version = "1.9999" + default_settings = {} + caching_config = {} + + def ready(self): + """Trigger callback when database is ready.""" + super().ready() + + nautobot_database_ready.connect(nautobot_database_ready_callback, sender=self) + + +config = NautobotSSoTPanoramaConfig # pylint:disable=invalid-name diff --git a/webinars/panorama-ssot/nautobot_ssot_panorama/api/__init__.py b/webinars/panorama-ssot/nautobot_ssot_panorama/api/__init__.py new file mode 100644 index 0000000..129a2cb --- /dev/null +++ b/webinars/panorama-ssot/nautobot_ssot_panorama/api/__init__.py @@ -0,0 +1 @@ +"""REST API module for nautobot_ssot_panorama plugin.""" diff --git a/webinars/panorama-ssot/nautobot_ssot_panorama/api/serializers.py b/webinars/panorama-ssot/nautobot_ssot_panorama/api/serializers.py new file mode 100644 index 0000000..f8fc2ef --- /dev/null +++ b/webinars/panorama-ssot/nautobot_ssot_panorama/api/serializers.py @@ -0,0 +1,49 @@ +"""Plugin API serializers.""" +from rest_framework import serializers + +from nautobot.dcim.api.nested_serializers import NestedDeviceSerializer, NestedInterfaceSerializer +from nautobot.extras.api.serializers import NautobotModelSerializer + +from nautobot_ssot_panorama.models import VirtualSystem, LogicalGroup, ControlPlaneSystem + + +class ControlPlaneSystemSerializer(NautobotModelSerializer): # pylint: disable=too-many-ancestors + """Used for normal CRUD operations.""" + + url = serializers.HyperlinkedIdentityField( + view_name="plugins-api:nautobot_ssot_panorama-api:controlplanesystem-detail" + ) + device = NestedDeviceSerializer() + + class Meta: + """Meta class.""" + + model = ControlPlaneSystem + fields = ["url", "id", "name", "device", "port", "fqdn_or_ip", "verify_ssl"] + + +class VirtualSystemSerializer(NautobotModelSerializer): # pylint: disable=too-many-ancestors + """Used for normal CRUD operations.""" + + url = 
serializers.HyperlinkedIdentityField(view_name="plugins-api:nautobot_ssot_panorama-api:virtualsystem-detail") + device = NestedDeviceSerializer() + interfaces = NestedInterfaceSerializer(many=True) + + class Meta: + """Meta class.""" + + model = VirtualSystem + fields = ["url", "id", "name", "system_id", "device", "interfaces"] + + +class LogicalGroupSerializer(NautobotModelSerializer): # pylint: disable=too-many-ancestors + """Used for normal CRUD operations.""" + + url = serializers.HyperlinkedIdentityField(view_name="plugins-api:nautobot_ssot_panorama-api:logicalgroup-detail") + devices = NestedDeviceSerializer(many=True) + + class Meta: + """Meta class.""" + + model = LogicalGroup + fields = ["url", "id", "name", "parent", "children", "devices", "virtual_systems"] diff --git a/webinars/panorama-ssot/nautobot_ssot_panorama/api/urls.py b/webinars/panorama-ssot/nautobot_ssot_panorama/api/urls.py new file mode 100644 index 0000000..3ad14d3 --- /dev/null +++ b/webinars/panorama-ssot/nautobot_ssot_panorama/api/urls.py @@ -0,0 +1,13 @@ +"""Django API urlpatterns declaration for firewall model plugin.""" + +from nautobot.core.api import OrderedDefaultRouter + +from nautobot_ssot_panorama.api import views + + +router = OrderedDefaultRouter() +router.register("control-plane-system", views.ControlPlaneSystemViewSet) +router.register("virtual-system", views.VirtualSystemViewSet) +router.register("logical-group", views.LogicalGroupViewSet) + +urlpatterns = router.urls diff --git a/webinars/panorama-ssot/nautobot_ssot_panorama/api/views.py b/webinars/panorama-ssot/nautobot_ssot_panorama/api/views.py new file mode 100644 index 0000000..406eca5 --- /dev/null +++ b/webinars/panorama-ssot/nautobot_ssot_panorama/api/views.py @@ -0,0 +1,29 @@ +"""API views for firewall models.""" +from nautobot.extras.api.views import NautobotModelViewSet + +from nautobot_ssot_panorama import filters, models +from nautobot_ssot_panorama.api import serializers + + +class 
ControlPlaneSystemViewSet(NautobotModelViewSet): # pylint: disable=too-many-ancestors + """ControlPlaneSystem viewset.""" + + queryset = models.ControlPlaneSystem.objects.all() + serializer_class = serializers.ControlPlaneSystemSerializer + filterset_class = filters.ControlPlaneSystemFilterSet + + +class VirtualSystemViewSet(NautobotModelViewSet): # pylint: disable=too-many-ancestors + """VirtualSystem viewset.""" + + queryset = models.VirtualSystem.objects.all() + serializer_class = serializers.VirtualSystemSerializer + filterset_class = filters.VirtualSystemFilterSet + + +class LogicalGroupViewSet(NautobotModelViewSet): # pylint: disable=too-many-ancestors + """LogicalGroup viewset.""" + + queryset = models.LogicalGroup.objects.all() + serializer_class = serializers.LogicalGroupSerializer + filterset_class = filters.LogicalGroupFilterSet diff --git a/webinars/panorama-ssot/nautobot_ssot_panorama/diffsync/adapters/__init__.py b/webinars/panorama-ssot/nautobot_ssot_panorama/diffsync/adapters/__init__.py new file mode 100644 index 0000000..c769461 --- /dev/null +++ b/webinars/panorama-ssot/nautobot_ssot_panorama/diffsync/adapters/__init__.py @@ -0,0 +1 @@ +"""Adapter classes for loading DiffSyncModels with data from Panorama or Nautobot.""" diff --git a/webinars/panorama-ssot/nautobot_ssot_panorama/diffsync/adapters/nautobot.py b/webinars/panorama-ssot/nautobot_ssot_panorama/diffsync/adapters/nautobot.py new file mode 100644 index 0000000..11f0ec7 --- /dev/null +++ b/webinars/panorama-ssot/nautobot_ssot_panorama/diffsync/adapters/nautobot.py @@ -0,0 +1,423 @@ +"""Nautobot Adapter for Panorama SSoT plugin.""" +from diffsync import DiffSync, DiffSyncModel +from diffsync.exceptions import ObjectNotFound +from django.contrib.contenttypes.models import ContentType +from nautobot.extras.models import Relationship, RelationshipAssociation +from nautobot.ipam.models import IPAddress, Prefix +from nautobot_firewall_models.models import ( + AddressObject, + ApplicationObject, 
+ ApplicationObjectGroup, + FQDN, + IPRange, + AddressObjectGroup, + ServiceObject, + ServiceObjectGroup, + Zone, + Policy, +) + +from nautobot_ssot_panorama.diffsync.models.nautobot import ( + NautobotAddressObject, + NautobotAddressGroup, + NautobotApplicationObject, + NautobotApplicationGroup, + NautobotDeviceGroup, + NautobotFirewall, + NautobotServiceObject, + NautobotServiceGroup, + NautobotUserObjectGroup, + NautobotZone, + NautobotPolicy, + NautobotPolicyRule, + NautobotVsys, +) + + +class NautobotAdapter(DiffSync): + """DiffSync adapter for Nautobot.""" + + addressobject = NautobotAddressObject + addressgroup = NautobotAddressGroup + application = NautobotApplicationObject + applicationgroup = NautobotApplicationGroup + serviceobject = NautobotServiceObject + servicegroup = NautobotServiceGroup + userobjectgroup = NautobotUserObjectGroup + firewall = NautobotFirewall + zone = NautobotZone + vsys = NautobotVsys + policyrule = NautobotPolicyRule + policy = NautobotPolicy + devicegroup = NautobotDeviceGroup + + top_level = [ + "addressobject", + "addressgroup", + "serviceobject", + "servicegroup", + "userobjectgroup", + "application", + "applicationgroup", + "firewall", + "zone", + "vsys", + "policyrule", + "policy", + "devicegroup", + ] + + def __init__(self, *args, job=None, sync=None, **kwargs): + """Initialize Nautobot. + + Args: + job (object, optional): Nautobot job. Defaults to None. + sync (object, optional): Nautobot DiffSync. Defaults to None. + """ + super().__init__(*args, **kwargs) + self.job = job + self.sync = sync + self._backend = "Nautobot" + self._app_relationship = Relationship.objects.get(name="Application Container") + self._app_content_type = ContentType.objects.get_for_model(ApplicationObject) + + def get_or_add(self, obj: "DiffSyncModel") -> "DiffSyncModel": + """Ensures a model is added. 
+ + Args: + obj (DiffSyncModel): Instance of model + + Returns: + DiffSyncModel: Instance of model that has been added + """ + model = obj.get_type() + ids = obj.get_unique_id() + try: + return self.store.get(model=model, identifier=ids) + except ObjectNotFound: + self.add(obj=obj) + return obj + + def load(self): + """Load data from Nautobot into DiffSync models.""" + self.job.log_info(None, f"Loading data from {self._backend}") + self.load_device_groups() + + def sync_complete(self, source, diff, *args, **kwargs): + """Creates custom relationship for app containters.""" + if source._backend == "Nautobot": + return + self.job.log_info(None, "Creating Custom Relationship For App Containers") + apps = source.get_all("application") + for pan_app in apps: + if pan_app.type != "container": + continue + n_app = self.get("application", pan_app.name) + src_app = ApplicationObject.objects.get(name=n_app.name) + pan_members = pan_app.members if pan_app.members else [] + n_members = n_app.members if n_app.members else [] + if sorted(n_members) == sorted(pan_members): + continue + missing = [app for app in pan_members if app not in n_members] + extra = [app for app in n_members if app not in pan_members] + if extra: + RelationshipAssociation.objects.filter( + relationship_id=self._app_relationship.id, + source_id=src_app.id, + destination_id__in=ApplicationObject.objects.filter(name__in=extra).values_list("id", flat=True), + ).delete() + if missing: + for i in missing: + RelationshipAssociation.objects.create( + relationship_id=self._app_relationship.id, + source_id=src_app.id, + source_type=self._app_content_type, + destination_id=ApplicationObject.objects.get(name=i).id, + destination_type=self._app_content_type, + ) + + def load_device_groups(self): + """Load Nautobot DeviceGroup.""" + for group in self.job.kwargs["panorama"].logical_groups.all(): + firewalls = [] + vsyss = [] + parent = None + if group.parent: + parent = group.parent.name + for firewall in 
group.devices.all(): + self.add( + self.firewall( + name=firewall.name, + serial=firewall.serial, + interfaces=sorted([i.name for i in firewall.interfaces.all()]), + device_group=group.name, + ) + ) + firewalls.append(firewall.serial) + for vsys in group.virtual_systems.all(): + self.add( + self.vsys( + name=vsys.name, + parent=vsys.device.serial, + interfaces=sorted([i.name for i in vsys.interfaces.all()]), + ) + ) + vsyss.append({"parent": vsys.device.serial, "name": vsys.name}) + pre_policy = None + post_policy = None + if group.pre_policy: + self.load_policy(group.pre_policy, group.name, "PRE") + pre_policy = group.pre_policy.name + if group.post_policy: + self.load_policy(group.post_policy, group.name, "POST") + post_policy = group.post_policy.name + device_group = self.devicegroup( + name=group.name, + panorama=str(group.control_plane.id), + vsys=sorted(vsyss), + firewalls=sorted(firewalls), + pre_policy=pre_policy, + post_policy=post_policy, + parent=parent, + ) + self.add(device_group) + + def load_policy(self, policy: "Policy", group_name, pre_post) -> "NautobotPolicy": + """Loads a policy. 
+ + Args: + policy (Policy): Nautobot Policy object + + Returns: + NautobotPolicy: DiffSyncModel isntance + """ + rules = [] + for rule in policy.policy_rules.all(): + applications = [] + applicationgroups = [] + usergroups = [] + destserviceobjects = [] + destservicegroups = [] + sourcezone = None + destzone = None + sourceaddressobjects = [] + sourceaddressgroups = [] + destaddressobjects = [] + destaddressgroups = [] + + # Load Applications + for app in rule.applications.all(): + self.load_application(app) + applications.append(app.name) + for group in rule.application_groups.all(): + self.load_application_group(group) + applicationgroups.append(group.name) + + # Load User + for user_group in rule.source_user_groups.all(): + self.get_or_add(self.userobjectgroup(name=user_group.name)) + usergroups.append(user_group.name) + + # Load Addresses + for address in rule.source_addresses.all(): + self.load_address_object(address) + sourceaddressobjects.append(address.name) + for group in rule.source_address_groups.all(): + self.load_address_group(group) + sourceaddressgroups.append(group.name) + for address in rule.destination_addresses.all(): + self.load_address_object(address) + destaddressobjects.append(address.name) + for group in rule.destination_address_groups.all(): + self.load_address_group(group) + destaddressgroups.append(group.name) + + # Load Services + for service in rule.destination_services.all(): + self.load_service_object(service) + destserviceobjects.append(service.name) + for group in rule.destination_service_groups.all(): + self.load_service_group(group) + destservicegroups.append(group.name) + + # Load Zone + if rule.source_zone: + self.load_zone(rule.source_zone) + sourcezone = rule.source_zone.name + if rule.destination_zone: + self.load_zone(rule.destination_zone) + destzone = rule.destination_zone.name + self.get_or_add( + self.policyrule( + name=rule.name, + action=rule.action, + log=rule.log, + index=rule.index, + 
applications=sorted(applications), + applicationgroups=sorted(applicationgroups), + usergroups=sorted(usergroups), + destserviceobjects=sorted(destserviceobjects), + destservicegroups=sorted(destservicegroups), + sourcezone=sourcezone, + destzone=destzone, + sourceaddressobjects=sorted(sourceaddressobjects), + sourceaddressgroups=sorted(sourceaddressgroups), + destaddressobjects=sorted(destaddressobjects), + destaddressgroups=sorted(destaddressgroups), + parent=group_name, + pre_post=pre_post, + ) + ) + rules.append(rule.name) + + pol_obj = self.policy(name=policy.name, policyrule_names=sorted(rules)) + return self.get_or_add(pol_obj) + + def load_zone(self, zone: "Zone") -> "NautobotZone": + """Loads zone. + + Args: + zone (Zone): Nautobot Zone + + Returns: + NautobotZone: DiffSyncModel for Zone + """ + firewalls = {} + for iface in zone.interfaces.all(): + if not firewalls.get(iface.device.serial): + firewalls[iface.device.serial] = [] + firewalls[iface.device.serial].append(iface.name) + return self.get_or_add(self.zone(name=zone.name, firewalls=firewalls)) + + def load_service_object(self, service: "ServiceObject") -> "NautobotServiceObject": + """Loads a service object. + + Args: + service (ServiceObject): _description_ + + Returns: + NautobotServiceObject: _description_ + """ + return self.get_or_add(self.serviceobject(name=service.name, port=service.port, protocol=service.ip_protocol)) + + def load_service_group(self, group: "ServiceObjectGroup") -> "NautobotServiceGroup": + """Loads a service group. + + Args: + group (ServiceObjectGroup): _description_ + + Returns: + NautobotServiceGroup: _description_ + """ + services = [] + for service in group.service_objects.all(): + self.load_service_object(service) + services.append(service.name) + return self.get_or_add(self.servicegroup(name=group.name, serviceobjects=sorted(services))) + + def load_address_group(self, group: "AddressObjectGroup") -> "NautobotAddressGroup": + """Loads a address group. 
+ + Args: + group (AddressObjectGroup): _description_ + + Returns: + NautobotAddressGroup: _description_ + """ + addresses = [] + for address in group.address_objects.all(): + self.load_address_object(address) + addresses.append(address.name) + return self.get_or_add( + self.addressgroup( + name=group.name, + type=group.custom_field_data.get("group-type"), + filter=group.custom_field_data.get("dynamic-address-group-filter"), + addressobjects=sorted(addresses), + ) + ) + + def load_address_object(self, address: "AddressObject") -> "NautobotAddressObject": + """Loads a address object. + + Args: + address (AddressObject): _description_ + + Returns: + NautobotAddressObject: _description_ + """ + if isinstance(address.address, FQDN): + address_object = self.addressobject(name=address.name, address=address.address.name, type="fqdn") + elif isinstance(address.address, IPRange): + address_object = self.addressobject(name=address.name, address=str(address.address), type="ip-range") + elif isinstance(address.address, IPAddress): + if (address.address.family == 4 and address.address.prefix_length == 32) or ( + address.address.family == 6 and address.address.prefix_length == 64 + ): + addr = address.address.host + else: + addr = str(address.address) + address_object = self.addressobject(name=address.name, address=addr, type="ip-netmask") + elif isinstance(address.address, Prefix): + address_object = self.addressobject(name=address.name, address=str(address.address), type="ip-netmask") + return self.get_or_add(address_object) + + def load_application_group(self, group: "ApplicationObjectGroup") -> "NautobotApplicationGroup": + """Loads a application group. 
+ + Args: + group (ApplicationObjectGroup): _description_ + + Returns: + NautobotApplicationGroup: _description_ + """ + apps = [] + for app in group.application_objects.all(): + self.load_application(app) + apps.append(app.name) + return self.get_or_add(self.applicationgroup(name=group.name, applications=sorted(apps))) + + def load_application(self, app: "ApplicationObject") -> "NautobotApplicationObject": + """Loads a application object. + + Args: + app (ApplicationObject): _description_ + + Returns: + NautobotApplicationObject: _description_ + """ + members = [ + i.get_destination().name + for i in RelationshipAssociation.objects.filter(source_id=app.id, relationship_id=self._app_relationship.id) + if i.get_destination() + ] + for i in members: + nested_app = ApplicationObject.objects.get(name=i) + self.get_or_add( + self.application( + name=nested_app.name, + category=nested_app.category, + subcategory=nested_app.subcategory, + technology=nested_app.technology, + risk=nested_app.risk, + default_ip_protocol=nested_app.default_ip_protocol, + default_type=nested_app.default_type, + description=nested_app.description, + type=nested_app.custom_field_data.get("application-type"), + members=[], + ) + ) + return self.get_or_add( + self.application( + name=app.name, + category=app.category, + subcategory=app.subcategory, + technology=app.technology, + risk=app.risk, + default_ip_protocol=app.default_ip_protocol, + default_type=app.default_type, + description=app.description, + type=app.custom_field_data.get("application-type"), + members=sorted(members), + ) + ) diff --git a/webinars/panorama-ssot/nautobot_ssot_panorama/diffsync/adapters/panorama.py b/webinars/panorama-ssot/nautobot_ssot_panorama/diffsync/adapters/panorama.py new file mode 100644 index 0000000..81a64e1 --- /dev/null +++ b/webinars/panorama-ssot/nautobot_ssot_panorama/diffsync/adapters/panorama.py @@ -0,0 +1,452 @@ +"""Nautobot SSoT Panorama Adapter for Panorama SSoT plugin.""" +from diffsync import 
DiffSync, DiffSyncModel +from diffsync.exceptions import ObjectNotFound +from panos.device import Vsys +from panos.errors import PanDeviceXapiError +from panos.firewall import Firewall +from panos.policies import SecurityRule +from nautobot.extras.choices import SecretsGroupAccessTypeChoices, SecretsGroupSecretTypeChoices + + +from nautobot_ssot_panorama.diffsync.models.panorama import ( + PanoramaAddressObject, + PanoramaAddressGroup, + PanoramaApplication, + PanoramaApplicationGroup, + PanoramaDeviceGroup, + PanoramaFirewall, + PanoramaServiceObject, + PanoramaServiceGroup, + PanoramaUserObjectGroup, + PanoramaZone, + PanoramaPolicy, + PanoramaPolicyRule, + PanoramaVsys, +) +from nautobot_ssot_panorama.utils.panorama import Panorama + + +class PanoramaAdapter(DiffSync): + """DiffSync adapter for Panorama.""" + + addressobject = PanoramaAddressObject + addressgroup = PanoramaAddressGroup + application = PanoramaApplication + applicationgroup = PanoramaApplicationGroup + devicegroup = PanoramaDeviceGroup + firewall = PanoramaFirewall + serviceobject = PanoramaServiceObject + servicegroup = PanoramaServiceGroup + userobjectgroup = PanoramaUserObjectGroup + zone = PanoramaZone + policy = PanoramaPolicy + policyrule = PanoramaPolicyRule + vsys = PanoramaVsys + + top_level = [ + "addressobject", + "addressgroup", + "serviceobject", + "servicegroup", + "userobjectgroup", + "application", + "applicationgroup", + "firewall", + "zone", + "vsys", + "policyrule", + "policy", + "devicegroup", + ] + + def __init__(self, *args, job=None, sync=None, pan=None, **kwargs): + """Initialize Panorama. + + Args: + job (object, optional): Panorama job. Defaults to None. + sync (object, optional): Panorama DiffSync. Defaults to None. + pan (ControlPlaneSystem, optional): Panoroama instance. 
+ """ + super().__init__(*args, **kwargs) + self.job = job + self.sync = sync + self.pan = pan + self.job.log_info(pan, "Using selected Panorama for connection.") + self.pano = Panorama( + url=pan.fqdn_or_ip, + username=pan.secrets_group.get_secret_value( + access_type=SecretsGroupAccessTypeChoices.TYPE_HTTP, + secret_type=SecretsGroupSecretTypeChoices.TYPE_USERNAME, + ), + password=pan.secrets_group.get_secret_value( + access_type=SecretsGroupAccessTypeChoices.TYPE_HTTP, + secret_type=SecretsGroupSecretTypeChoices.TYPE_PASSWORD, + ), + port=pan.port, + verify=pan.verify_ssl, + ) + self._backend = "Panorama" + self._loaded_apps = [] + + def get_or_add(self, obj: "DiffSyncModel") -> "DiffSyncModel": + """Ensures a model is added. + + Args: + obj (DiffSyncModel): Instance of model + + Returns: + DiffSyncModel: Instance of model that has been added + """ + model = obj.get_type() + ids = obj.get_unique_id() + try: + return self.store.get(model=model, identifier=ids) + except ObjectNotFound: + self.add(obj=obj) + return obj + + def load(self): + """Load data from Panorama into DiffSync models.""" + self.job.log_info(self.pan, f"Caching Address Groups from {self._backend}") + self.pano.address.retrieve_address_groups() + self.job.log_info(self.pan, f"Caching Address Objects from {self._backend}") + self.pano.address.retrieve_address_objects() + self.job.log_info(self.pan, f"Caching Service Groups from {self._backend}") + self.pano.service.retrieve_service_groups() + self.job.log_info(self.pan, f"Caching Service Objects from {self._backend}") + self.pano.service.retrieve_service_objects() + self.job.log_info(self.pan, f"Caching Application Groups from {self._backend}") + self.pano.application.retrieve_application_groups() + self.job.log_info(self.pan, f"Caching Application Objects from {self._backend}") + self.pano.application.retrieve_application_objects() + self.job.log_info(self.pan, f"Caching User Groups from {self._backend}") + 
self.pano.user.retrieve_dynamic_user_groups() + self.job.log_info(self.pan, f"Caching Zones from {self._backend}") + self.pano.firewall.retrieve_zones() + self.job.log_info(self.pan, f"Caching Policies & Rules from {self._backend}") + self.pano.policy.retrieve_security_rules() + self.job.log_info(self.pan, f"Caching Firewalls from {self._backend}") + self.pano.firewall.retrieve_firewalls() + self.job.log_info(self.pan, f"Caching Vsys from {self._backend}") + self.pano.firewall.retrieve_vsys() + self.job.log_info(self.pan, f"Loading objects from {self._backend} via cache") + self.load_cached_objects() + + def load_cached_objects(self): + """Load objects from cache.""" + for group_name, group in self.pano.device_group.device_groups.items(): + parent = self.pano.device_group.get_parent(group_name) + if not parent: + parent = "shared" + vsyss = [] + firewalls = [] + for child in group.children: + if isinstance(child, Vsys): + self.get_or_add( + self.vsys(name=child.name, parent=child.parent.serial, interfaces=sorted(child.interface)) + ) + vsyss.append({"parent": child.parent.serial, "name": child.name}) + elif isinstance(child, Firewall): + ifaces = [] + try: + for vsys in Vsys.refreshall(child): + self.get_or_add( + self.vsys(name=vsys.name, parent=child.serial, interfaces=sorted(vsys.interface)) + ) + ifaces += vsys.interface + vsyss.append({"parent": child.serial, "name": vsys.name}) + except PanDeviceXapiError: + pass + self.get_or_add( + self.firewall( + name=self.pano.firewall.get_hostname(child), + serial=child.serial, + device_group=group.name, + interfaces=sorted(ifaces), + ) + ) + firewalls.append(child.serial) + self.add( + self.devicegroup( + name=group_name, + panorama=str(self.pan.id), + vsys=sorted(vsyss), + firewalls=sorted(firewalls), + pre_policy=self.load_policy("PRE", group_name), + post_policy=self.load_policy("POST", group_name), + parent=parent, + ) + ) + self.add( + self.devicegroup( + name="shared", + panorama=str(self.pan.id), + 
pre_policy=self.load_policy("PRE", "shared"), + post_policy=self.load_policy("POST", "shared"), + vsys=[], + firewalls=[], + ) + ) + + def load_policy(self, pre_post: str, group_name: str): + """Get or adds Policy and returns to be added to DeviceGroup. + + Args: + pre_post (str): Policy is pre or post + group_name (str): DeviceGroup name for easy lookup + + Returns: + PanoramaPolicy: DiffSyncModel of a Policy + """ + rulebase = self.pano.policy.policies[group_name][pre_post] + if len(rulebase) == 0: + return None + rules = [] + for rule in rulebase[0].children: + if not isinstance(rule, SecurityRule): + continue + + applications = [] + applicationgroups = [] + usergroups = [] + destserviceobjects = [] + destservicegroups = [] + sourcezone = None + destzone = None + sourceaddressobjects = [] + sourceaddressgroups = [] + destaddressobjects = [] + destaddressgroups = [] + rules.append(rule.name) + + # Load Apps + if rule.application != ["any"]: + for app in rule.application: + if self.pano.application.applications[app]["type"] == "container": + applications.append(app) + for nested in self.pano.application.applications[app]["value"].applications: + self.load_application(nested) + self.load_application(app) + elif self.pano.application.applications[app]["type"] == "object": + applications.append(app) + self.load_application(app) + else: + self.load_application_group(app) + applicationgroups.append(app) + + # Load Services + if rule.service != ["any"]: + for svc in rule.service: + if svc == "application-default": + continue + if self.pano.service.services[svc]["type"] == "object": + self.load_service_object(svc) + destserviceobjects.append(svc) + else: + self.load_service_group(svc) + destservicegroups.append(svc) + + # Load User + if rule.source_user != ["any"]: + for user in rule.source_user: + self.load_user_group(user) + usergroups.append(user) + + # Load Zones + if rule.fromzone != ["any"]: + self.load_zone(rule.fromzone[0]) + sourcezone = rule.fromzone[0] + if 
rule.tozone != ["any"]: + self.load_zone(rule.tozone[0]) + destzone = rule.tozone[0] + + # Load Source Address + if rule.source != ["any"]: + for addr in rule.source: + if self.pano.address.addresses[addr]["type"] == "object": + self.load_address_object(addr) + sourceaddressobjects.append(addr) + else: + self.load_address_group(addr) + sourceaddressgroups.append(addr) + + # Load Destination Address + if rule.destination != ["any"]: + for addr in rule.destination: + if self.pano.address.addresses[addr]["type"] == "object": + self.load_address_object(addr) + destaddressobjects.append(addr) + else: + self.load_address_group(addr) + destaddressgroups.append(addr) + + # Create base rule + self.get_or_add( + self.policyrule( + name=rule.name, + action=rule.action, + log=True if rule.log_end or rule.log_start else False, + index=rulebase[0].children.index(rule) + 1, + applications=sorted(applications), + applicationgroups=sorted(applicationgroups), + usergroups=sorted(usergroups), + destserviceobjects=sorted(destserviceobjects), + destservicegroups=sorted(destservicegroups), + sourcezone=sourcezone, + destzone=destzone, + sourceaddressobjects=sorted(sourceaddressobjects), + sourceaddressgroups=sorted(sourceaddressgroups), + destaddressobjects=sorted(destaddressobjects), + destaddressgroups=sorted(destaddressgroups), + parent=group_name, + pre_post=pre_post, + ) + ) + if not rules: + return None + self.get_or_add(self.policy(name=f"{group_name}-{pre_post}", policyrule_names=sorted(rules))) + return f"{group_name}-{pre_post}" + + def load_address_object(self, name: str) -> "PanoramaAddressObject": + """Loads a address object. 
+ + Args: + name (str): Name of the object + + Returns: + PanoramaAddressObject: _description_ + """ + address = self.pano.address.addresses[name]["value"] + if address.type == "ip-wildcard": + raise ValueError("Not Supported") + return self.get_or_add(self.addressobject(name=address.name, address=address.value, type=address.type)) + + def load_address_group(self, name: str) -> "PanoramaAddressGroup": + """Loads a address group. + + Args: + name (str): Name of the object + + Returns: + PanoramaAddressGroup: _description_ + """ + group = self.pano.address.addresses[name]["value"] + for addr in group.static_value: + self.load_address_object(addr) + return self.get_or_add( + self.addressgroup( + name=group.name, + type="static" if isinstance(group.static_value, list) else "dynamic", + filter=group.dynamic_value, + addressobjects=sorted(group.static_value) if isinstance(group.static_value, list) else [], + ) + ) + + def load_zone(self, name: str) -> "PanoramaZone": + """Loads a zone. + + Args: + name (str): Name of the object. + to_from (str): To or from. + + Returns: + PanoramaZone: _description_ + """ + firewalls = {} + for firewall, zones in self.pano.firewall.zones.items(): + if zones.get(name): + firewalls[firewall] = zones[name].interface + return self.get_or_add(self.zone(name=name, firewalls=firewalls)) + + def load_service_group(self, name: str) -> "PanoramaServiceGroup": + """Loads a service group. + + Args: + name (str): Name of the object. + + Returns: + PanoramaServiceGroup: _description_ + """ + group = self.pano.service.services[name]["value"] + for svc in group.value: + self.load_service_object(svc) + return self.get_or_add( + self.servicegroup(name=group.name, serviceobjects=sorted(group.value if group.value else [])) + ) + + def load_service_object(self, name: str) -> "PanoramaServiceObject": + """Loads a service object. + + Args: + name (str): Name of the object. 
+ + Returns: + PanoramaServiceObject: _description_ + """ + service = self.pano.service.services[name]["value"] + return self.get_or_add( + self.serviceobject( + name=service.name, + port=service.destination_port, + protocol=self.pano.service.find_proper_protocol(service.protocol), + ) + ) + + def load_user_group(self, name: str) -> "PanoramaUserObjectGroup": + """Loads a user group. + + Args: + name (str): Name of the object. + + Returns: + PanoramaUserObjectGroup: _description_ + """ + return self.get_or_add(self.userobjectgroup(name=self.pano.user.users[name]["value"].name)) + + def load_application(self, name: str) -> "PanoramaApplication": + """Loads a application object. + + Args: + name (str): Name of the object. + + Returns: + PanoramaApplication: _description_ + """ + app_obj = self.pano.application.applications[name]["value"] + if self.pano.application.applications[name]["type"] == "container": + app = self.application( + name=app_obj.name, + type=self.pano.application.applications[name]["type"], + members=sorted(app_obj.applications), + ) + else: + app = self.application( + name=app_obj.name, + category=app_obj.category, + subcategory=app_obj.subcategory, + technology=app_obj.technology, + risk=app_obj.risk, + default_ip_protocol=app_obj.default_ip_protocol, + default_type=" ".join(app_obj.default_port), + description=app_obj.description, + type=self.pano.application.applications[name]["type"], + members=[], + ) + + return self.get_or_add(app) + + def load_application_group(self, name: str) -> "PanoramaApplicationGroup": + """Loads a application group. 
+ + Args: + name (str): Name of app group + + Returns: + PanoramaApplicationGroup: _description_ + """ + group = self.pano.application.applications[name]["value"] + for app in group.value: + self.load_application(app) + return self.get_or_add( + self.applicationgroup(name=group.name, applications=sorted(group.value if group.value else [])) + ) diff --git a/webinars/panorama-ssot/nautobot_ssot_panorama/diffsync/models/__init__.py b/webinars/panorama-ssot/nautobot_ssot_panorama/diffsync/models/__init__.py new file mode 100644 index 0000000..7fe5a54 --- /dev/null +++ b/webinars/panorama-ssot/nautobot_ssot_panorama/diffsync/models/__init__.py @@ -0,0 +1 @@ +"""DiffSync models and adapters for the Panorama SSoT plugin.""" diff --git a/webinars/panorama-ssot/nautobot_ssot_panorama/diffsync/models/base.py b/webinars/panorama-ssot/nautobot_ssot_panorama/diffsync/models/base.py new file mode 100644 index 0000000..d55588a --- /dev/null +++ b/webinars/panorama-ssot/nautobot_ssot_panorama/diffsync/models/base.py @@ -0,0 +1,219 @@ +"""DiffSyncModel subclasses for Nautobot-to-Panorama data sync.""" +from typing import List, Optional +from diffsync import DiffSyncModel + + +class Firewall(DiffSyncModel): + """DiffSync model for Panorama Firewall.""" + + _modelname = "firewall" + _identifiers = ("serial",) + _attributes = ("name", "interfaces", "device_group") + + name: str + serial: str + device_group: Optional[str] + interfaces: Optional[list] + + +class Vsys(DiffSyncModel): + """DiffSync model for Panorama Vsys.""" + + _modelname = "vsys" + _identifiers = ("parent", "name") + _attributes = ("interfaces",) + + name: str + parent: str + interfaces: Optional[list] + + +class DeviceGroup(DiffSyncModel): + """DiffSync model for Panorama DeviceGroup.""" + + _modelname = "devicegroup" + _identifiers = ("name",) + _attributes = ("parent", "vsys", "firewalls", "panorama", "pre_policy", "post_policy") + + name: str + panorama: Optional[str] + parent: Optional[str] + vsys: Optional[list] 
+ firewalls: Optional[list] + pre_policy: Optional[str] + post_policy: Optional[str] + + +class AddressObject(DiffSyncModel): + """DiffSync model for Panorama AddressObject.""" + + _modelname = "addressobject" + _identifiers = ("name",) + _attributes = ( + "address", + "type", + ) + + name: str + address: str + type: str + + +class AddressGroup(DiffSyncModel): + """DiffSync model for Panorama AddressGroup.""" + + _modelname = "addressgroup" + _identifiers = ("name",) + _attributes = ("addressobjects", "type", "filter") + + name: str + addressobjects: list + type: Optional[str] + filter: Optional[str] + + +class Application(DiffSyncModel): + """DiffSync model for Panorama Application.""" + + _modelname = "application" + _identifiers = ("name",) + _attributes = ( + "category", + "subcategory", + "technology", + "risk", + "default_type", + "default_ip_protocol", + "description", + "type", + "members", + ) + + name: str + category: Optional[str] + subcategory: Optional[str] + technology: Optional[str] + risk: Optional[int] + default_type: Optional[str] + default_ip_protocol: Optional[str] + description: Optional[str] + type: Optional[str] + members: Optional[list] + + +class ApplicationGroup(DiffSyncModel): + """DiffSync model for Panorama ApplicationGroup.""" + + _modelname = "applicationgroup" + _identifiers = ("name",) + _attributes = ("applications",) + + name: str + applications: list + + +class ServiceObject(DiffSyncModel): + """DiffSync model for Panorama ServiceObject.""" + + _modelname = "serviceobject" + _identifiers = ("name",) + _attributes = ( + "port", + "protocol", + ) + + name: str + port: Optional[str] + protocol: str + + +class ServiceGroup(DiffSyncModel): + """DiffSync model for Panorama ServiceGroup.""" + + _modelname = "servicegroup" + _identifiers = ("name",) + _attributes = ("serviceobjects",) + + name: str + serviceobjects: list + + +class UserObjectGroup(DiffSyncModel): + """DiffSync model for Panorama UserObjectGroup.""" + + _modelname = 
"userobjectgroup" + _identifiers = ("name",) + + name: str + + +class Zone(DiffSyncModel): + """DiffSync model for Panorama Zone.""" + + _modelname = "zone" + _identifiers = ("name",) + _attributes = ("firewalls",) + + name: str + firewalls: dict + + +class PolicyRule(DiffSyncModel): + """DiffSync model for Panorama PolicyRule.""" + + _modelname = "policyrule" + _identifiers = ("name", "parent", "pre_post") + _attributes = ( + "sourceserviceobjects", + "sourceservicegroups", + "destserviceobjects", + "destservicegroups", + "sourcezone", + "destzone", + "sourceaddressobjects", + "sourceaddressgroups", + "destaddressobjects", + "destaddressgroups", + "action", + "usergroups", + "log", + "index", + "applications", + "applicationgroups", + ) + + # Required + action: str + log: bool + name: str + index: int + parent: str + pre_post: str + + # Optional Source, empty translates to any + sourceaddressgroups: Optional[list] + sourceaddressobjects: Optional[list] + sourceservicegroups: Optional[list] + sourceserviceobjects: Optional[list] + sourcezone: Optional[str] + usergroups: Optional[list] + + # Optional Destination, empty translates to any + destaddressgroups: Optional[list] + destaddressobjects: Optional[list] + destservicegroups: Optional[list] + destserviceobjects: Optional[list] + applications: Optional[list] + applicationgroups: Optional[list] + destzone: Optional[str] + + +class Policy(DiffSyncModel): + """DiffSync model for Panorama Policy.""" + + _modelname = "policy" + _identifiers = ("name",) + _attributes = ("policyrule_names",) + + name: str + policyrule_names: Optional[list] diff --git a/webinars/panorama-ssot/nautobot_ssot_panorama/diffsync/models/nautobot.py b/webinars/panorama-ssot/nautobot_ssot_panorama/diffsync/models/nautobot.py new file mode 100644 index 0000000..8336115 --- /dev/null +++ b/webinars/panorama-ssot/nautobot_ssot_panorama/diffsync/models/nautobot.py @@ -0,0 +1,243 @@ +"""Nautobot DiffSync models for Panorama SSoT.""" +from 
nautobot_ssot_panorama.diffsync.models.base import (
    AddressObject,
    AddressGroup,
    Application,
    ApplicationGroup,
    DeviceGroup,
    Firewall,
    ServiceObject,
    ServiceGroup,
    Zone,
    UserObjectGroup,
    PolicyRule,
    Policy,
    Vsys,
)
from nautobot_ssot_panorama.utils.nautobot import Nautobot


# NOTE(review): module-level singleton client, instantiated at import time;
# confirm Nautobot() performs no I/O in __init__.
NAUTOBOT = Nautobot()


class NautobotVsys(Vsys):
    """Nautobot implementation of Panorama Vsys model."""

    @classmethod
    def create(cls, diffsync, ids, attrs):
        """Create Vsys in Nautobot from NautobotVsys object."""
        NAUTOBOT.create_vsys(ids, attrs)
        return super().create(diffsync=diffsync, ids=ids, attrs=attrs)

    def update(self, attrs):
        """Update Vsys in Nautobot from NautobotVsys object."""
        NAUTOBOT.update_vsys(self.name, self.parent, attrs)
        return super().update(attrs)

    def delete(self):
        """Delete Vsys in Nautobot (intentional no-op -- nothing is removed)."""
        return self


class NautobotFirewall(Firewall):
    """Nautobot implementation of Panorama Firewall model."""

    @classmethod
    def create(cls, diffsync, ids, attrs):
        """Create Firewall in Nautobot from NautobotFirewall object."""
        NAUTOBOT.create_firewall(ids, attrs)
        return super().create(diffsync=diffsync, ids=ids, attrs=attrs)

    def update(self, attrs):
        """Update Firewall in Nautobot from NautobotFirewall object."""
        # Firewalls are keyed by serial number, not name.
        NAUTOBOT.update_firewall(self.serial, attrs)
        return super().update(attrs)

    def delete(self):
        """Delete Firewall in Nautobot (intentional no-op)."""
        return self


class NautobotDeviceGroup(DeviceGroup):
    """Nautobot implementation of Panorama DeviceGroup model."""

    @classmethod
    def create(cls, diffsync, ids, attrs):
        """Create DeviceGroup in Nautobot from NautobotDeviceGroup object."""
        NAUTOBOT.create_device_group(ids, attrs)
        return super().create(diffsync=diffsync, ids=ids, attrs=attrs)

    def update(self, attrs):
        """Update DeviceGroup in Nautobot from NautobotDeviceGroup object."""
        NAUTOBOT.update_device_group(self.name, attrs)
        return super().update(attrs)

    def delete(self):
        """Delete DeviceGroup in Nautobot (intentional no-op)."""
        return self


class NautobotAddressObject(AddressObject):
    """Nautobot implementation of Panorama AddressObject model."""

    @classmethod
    def create(cls, diffsync, ids, attrs):
        """Create AddressObject in Nautobot from NautobotAddressObject object."""
        NAUTOBOT.create_address_object(ids, attrs)
        return super().create(diffsync=diffsync, ids=ids, attrs=attrs)

    def update(self, attrs):
        """Update AddressObject in Nautobot from NautobotAddressObject object."""
        # type is passed because the backing Nautobot model differs per
        # address type (prefix/ip-range/fqdn) -- TODO confirm.
        NAUTOBOT.update_address_object(self.name, self.type, attrs)
        return super().update(attrs)

    def delete(self):
        """Delete AddressObject in Nautobot (intentional no-op)."""
        return self


class NautobotAddressGroup(AddressGroup):
    """Nautobot implementation of Panorama AddressGroup model."""

    # NOTE(review): no delete() override here -- inherits base behavior,
    # unlike sibling classes; confirm this asymmetry is intended.

    @classmethod
    def create(cls, diffsync, ids, attrs):
        """Create AddressGroup in Nautobot from NautobotAddressGroup object."""
        NAUTOBOT.create_address_group(ids, attrs)
        return super().create(diffsync=diffsync, ids=ids, attrs=attrs)

    def update(self, attrs):
        """Update AddressGroup in Nautobot from NautobotAddressGroup object."""
        NAUTOBOT.update_address_group(self.name, attrs)
        return super().update(attrs)


class NautobotApplicationObject(Application):
    """Nautobot implementation of Panorama ApplicationObject model."""

    @classmethod
    def create(cls, diffsync, ids, attrs):
        """Create ApplicationObject in Nautobot from NautobotApplicationObject object."""
        NAUTOBOT.create_application_object(ids, attrs)
        return super().create(diffsync=diffsync, ids=ids, attrs=attrs)

    def update(self, attrs):
        """Update ApplicationObject in Nautobot from NautobotApplicationObject object."""
        NAUTOBOT.update_application_object(self.name, attrs)
        return super().update(attrs)

    def delete(self):
        """Delete ApplicationObject in Nautobot (intentional no-op)."""
        return self


class NautobotApplicationGroup(ApplicationGroup):
    """Nautobot implementation of Panorama ApplicationGroup model."""

    # NOTE(review): no delete() override -- see NautobotAddressGroup.

    @classmethod
    def create(cls, diffsync, ids, attrs):
        """Create ApplicationGroup in Nautobot from NautobotApplicationGroup object."""
        NAUTOBOT.create_application_group(ids, attrs)
        return super().create(diffsync=diffsync, ids=ids, attrs=attrs)

    def update(self, attrs):
        """Update ApplicationGroup in Nautobot from NautobotApplicationGroup object."""
        NAUTOBOT.update_application_group(self.name, attrs)
        return super().update(attrs)


class NautobotServiceObject(ServiceObject):
    """Nautobot implementation of Panorama ServiceObject model."""

    @classmethod
    def create(cls, diffsync, ids, attrs):
        """Create ServiceObject in Nautobot from NautobotServiceObject object."""
        NAUTOBOT.create_service_object(ids, attrs)
        return super().create(diffsync=diffsync, ids=ids, attrs=attrs)

    def update(self, attrs):
        """Update ServiceObject in Nautobot from NautobotServiceObject object."""
        NAUTOBOT.update_service_object(self.name, attrs)
        return super().update(attrs)

    def delete(self):
        """Delete ServiceObject in Nautobot (intentional no-op)."""
        return self


class NautobotServiceGroup(ServiceGroup):
    """Nautobot implementation of Panorama ServiceGroup model."""

    @classmethod
    def create(cls, diffsync, ids, attrs):
        """Create ServiceGroup in Nautobot from NautobotServiceGroup object."""
        NAUTOBOT.create_service_group(ids, attrs)
        return super().create(diffsync=diffsync, ids=ids, attrs=attrs)

    def update(self, attrs):
        """Update ServiceGroup in Nautobot from NautobotServiceGroup object."""
        NAUTOBOT.update_service_group(self.name, attrs)
        return super().update(attrs)

    def delete(self):
        """Delete ServiceGroup in Nautobot (intentional no-op)."""
        return self


class NautobotUserObjectGroup(UserObjectGroup):
    """Nautobot implementation of Panorama UserObjectGroup model."""

    @classmethod
    def create(cls, diffsync, ids, attrs):
        """Create UserObjectGroup in Nautobot from NautobotUserObjectGroup object."""
        # Only identifiers are used; the model has no diffed attributes.
        NAUTOBOT.create_user_object_group(ids)
        return super().create(diffsync=diffsync, ids=ids, attrs=attrs)

    def update(self, attrs):
        """Update UserObjectGroup in Nautobot (unsupported; logged and skipped)."""
        self.diffsync.job.log_info("User Object Groups do not support update.")
        return super().update(attrs)


class NautobotZone(Zone):
    """Nautobot implementation of Panorama Zone model."""

    @classmethod
    def create(cls, diffsync, ids, attrs):
        """Create Zone in Nautobot from NautobotZone object."""
        NAUTOBOT.create_zone(ids, attrs["firewalls"])
        return super().create(diffsync=diffsync, ids=ids, attrs=attrs)

    def update(self, attrs):
        """Update Zone in Nautobot (unsupported; logged and skipped)."""
        self.diffsync.job.log_info("Zones do not support update.")
        return super().update(attrs)


class NautobotPolicyRule(PolicyRule):
    """Nautobot implementation of Panorama PolicyRule model."""

    @classmethod
    def create(cls, diffsync, ids, attrs):
        """Create PolicyRule in Nautobot from NautobotPolicyRule object."""
        NAUTOBOT.create_policy_rule(ids, attrs)
        return super().create(diffsync=diffsync, ids=ids, attrs=attrs)

    def update(self, attrs):
        """Update PolicyRule in Nautobot from NautobotPolicyRule object."""
        NAUTOBOT.update_policy_rule(self.name, attrs)
        return super().update(attrs)


class NautobotPolicy(Policy):
    """Nautobot implementation of Panorama Policy model."""

    @classmethod
    def create(cls, diffsync, ids, attrs):
        """Create Policy in Nautobot from NautobotPolicy object."""
        NAUTOBOT.create_policy(ids, attrs)
        return super().create(diffsync=diffsync, ids=ids, attrs=attrs)

    def update(self, attrs):
        """Update Policy in Nautobot from NautobotPolicy object."""
        NAUTOBOT.update_policy(self.name, attrs)
        return
super().update(attrs)
diff --git a/webinars/panorama-ssot/nautobot_ssot_panorama/diffsync/models/panorama.py b/webinars/panorama-ssot/nautobot_ssot_panorama/diffsync/models/panorama.py
new file mode 100644
index 0000000..a07b677
--- /dev/null
+++ b/webinars/panorama-ssot/nautobot_ssot_panorama/diffsync/models/panorama.py
@@ -0,0 +1,300 @@
"""Panorama-side DiffSync models for the Nautobot SSoT Panorama plugin."""

from nautobot_ssot_panorama.diffsync.models.base import (
    AddressObject,
    AddressGroup,
    Application,
    ApplicationGroup,
    DeviceGroup,
    Firewall,
    ServiceObject,
    ServiceGroup,
    Zone,
    UserObjectGroup,
    PolicyRule,
    Policy,
    Vsys,
)


class PanoramaVsys(Vsys):
    """Panorama implementation of Vsys model."""

    # Vsys are read-only on the Panorama side: create/update/delete defer to
    # the base class without touching the device.

    @classmethod
    def create(cls, diffsync, ids, attrs):
        """Create Vsys in Panorama (no-op) from PanoramaVsys object."""
        return super().create(diffsync=diffsync, ids=ids, attrs=attrs)

    def update(self, attrs):
        """Update Vsys in Panorama (no-op) from PanoramaVsys object."""
        return super().update(attrs)

    def delete(self):
        """Delete Vsys in Panorama (intentional no-op)."""
        return self


class PanoramaFirewall(Firewall):
    """Panorama implementation of Firewall model."""

    @classmethod
    def create(cls, diffsync, ids, attrs):
        """Create Firewall in Panorama from PanoramaFirewall object."""
        diffsync.pano.firewall.create_firewall(
            name=attrs["name"],
            serial=ids["serial"],
            interfaces=attrs.get("interfaces", []),
            group=attrs["device_group"],
        )
        return super().create(diffsync=diffsync, ids=ids, attrs=attrs)

    def update(self, attrs):
        """Update Firewall in Panorama (no-op) from PanoramaFirewall object."""
        return super().update(attrs)

    def delete(self):
        """Delete Firewall in Panorama (intentional no-op)."""
        return self


class PanoramaDeviceGroup(DeviceGroup):
    """Panorama implementation of DeviceGroup model."""

    @classmethod
    def create(cls, diffsync, ids, attrs):
        """Create DeviceGroup in Panorama from PanoramaDeviceGroup object."""
        # parent may be absent for top-level device groups.
        diffsync.pano.device_group.create_device_group(name=ids["name"], parent=attrs.get("parent"))
        return super().create(diffsync=diffsync, ids=ids, attrs=attrs)

    def update(self, attrs):
        """Update DeviceGroup in Panorama (no-op) from PanoramaDeviceGroup object."""
        return super().update(attrs)

    def delete(self):
        """Delete DeviceGroup in Panorama (intentional no-op)."""
        return self


class PanoramaAddressObject(AddressObject):
    """Panorama implementation of AddressObject DiffSync model."""

    @classmethod
    def create(cls, diffsync, ids, attrs):
        """Create AddressObject in Panorama from PanoramaAddressObject object."""
        diffsync.pano.address.create_address_object(name=ids["name"], address=attrs["address"], addr_type=attrs["type"])
        return super().create(diffsync=diffsync, ids=ids, attrs=attrs)

    def update(self, attrs):
        """Update AddressObject in Panorama from PanoramaAddressObject object."""
        self.diffsync.pano.address.update_address_object(name=self.name, attrs=attrs)
        return super().update(attrs)

    def delete(self):
        """Delete AddressObject in Panorama (intentional no-op)."""
        return self


class PanoramaAddressGroup(AddressGroup):
    """Panorama implementation of AddressGroup DiffSync model."""

    @classmethod
    def create(cls, diffsync, ids, attrs):
        """Create AddressGroup in Panorama from PanoramaAddressGroup object."""
        diffsync.pano.address.create_address_group(
            name=ids["name"], addrs=attrs["addressobjects"], grp_type=attrs["type"], filter=attrs["filter"]
        )
        return super().create(diffsync=diffsync, ids=ids, attrs=attrs)

    def update(self, attrs):
        """Update AddressGroup in Panorama (no-op) from PanoramaAddressGroup object."""
        return super().update(attrs)

    def delete(self):
        """Delete AddressGroup in Panorama (intentional no-op)."""
        return self


class PanoramaApplication(Application):
    """Panorama implementation of ApplicationObject DiffSync model."""

    @classmethod
    def create(cls, diffsync, ids, attrs):
        """Create Application in Panorama from PanoramaApplication object."""
        diffsync.pano.application.create_application(name=ids["name"], attrs=attrs)
        return super().create(diffsync=diffsync, ids=ids, attrs=attrs)

    def update(self, attrs):
        """Update Application in Panorama (no-op) from PanoramaApplication object."""
        return super().update(attrs)

    def delete(self):
        """Delete Application in Panorama (intentional no-op)."""
        return self


class PanoramaApplicationGroup(ApplicationGroup):
    """Panorama implementation of ApplicationGroup DiffSync model."""

    @classmethod
    def create(cls, diffsync, ids, attrs):
        """Create application group in Panorama from PanoramaApplicationGroup object."""
        diffsync.pano.application.create_application_group(name=ids["name"], applications=attrs["applications"])
        return super().create(diffsync=diffsync, ids=ids, attrs=attrs)

    def update(self, attrs):
        """Update application group in Panorama from PanoramaApplicationGroup object."""
        self.diffsync.pano.application.update_application_group(name=self.name, applications=attrs["applications"])
        return super().update(attrs)

    def delete(self):
        """Delete application group in Panorama (intentional no-op)."""
        return self


class PanoramaServiceObject(ServiceObject):
    """Panorama implementation of ServiceObject DiffSync model."""

    @classmethod
    def create(cls, diffsync, ids, attrs):
        """Create ServiceObject in Panorama from PanoramaServiceObject object."""
        diffsync.pano.service.create_service_object(
            name=ids["name"],
            port=attrs["port"],
            protocol=attrs["protocol"],
        )
        return super().create(diffsync=diffsync, ids=ids, attrs=attrs)

    def update(self, attrs):
        """Update ServiceObject in Panorama (no-op) from PanoramaServiceObject object."""
        return super().update(attrs)

    def delete(self):
        """Delete ServiceObject in Panorama (intentional no-op)."""
        return self


class PanoramaServiceGroup(ServiceGroup):
    """Panorama implementation of ServiceGroup DiffSync model."""

    @classmethod
    def create(cls, diffsync, ids, attrs):
        """Create ServiceGroup in Panorama from PanoramaServiceGroup object."""
        diffsync.pano.service.create_service_group(name=ids["name"], svc_objs=attrs["serviceobjects"])
        return super().create(diffsync=diffsync, ids=ids, attrs=attrs)

    def update(self, attrs):
        """Update ServiceGroup in Panorama (no-op) from PanoramaServiceGroup object."""
        return super().update(attrs)

    def delete(self):
        """Delete ServiceGroup in Panorama (intentional no-op)."""
        return self


class PanoramaUserObjectGroup(UserObjectGroup):
    """Panorama implementation of UserObjectGroup DiffSync model."""

    @classmethod
    def create(cls, diffsync, ids, attrs):
        """Create DynamicUserGroup in Panorama from PanoramaUserObjectGroup object."""
        diffsync.pano.user.create_dynamic_user_group(name=ids["name"])
        return super().create(diffsync=diffsync, ids=ids, attrs=attrs)

    def update(self, attrs):
        """Update DynamicUserGroup in Panorama (no-op) from PanoramaUserObjectGroup object."""
        return super().update(attrs)

    def delete(self):
        """Delete DynamicUserGroup in Panorama (intentional no-op)."""
        return self


class PanoramaZone(Zone):
    """Panorama implementation of Zone DiffSync model."""

    @classmethod
    def create(cls, diffsync, ids, attrs):
        """Create Zone in Panorama from PanoramaZone object."""
        diffsync.pano.firewall.create_zone(name=ids["name"], firewalls=attrs["firewalls"])
        return super().create(diffsync=diffsync, ids=ids, attrs=attrs)

    def update(self, attrs):
        """Update Zone in Panorama (no-op) from PanoramaZone object."""
        return super().update(attrs)

    def delete(self):
        """Delete Zone in Panorama (intentional no-op)."""
        return self


class PanoramaPolicyRule(PolicyRule):
    """Panorama implementation of PolicyRule DiffSync model."""

    @classmethod
    def create(cls, diffsync, ids, attrs):
        """Create Device in Panorama from
PanoramaPolicyRule object.""" + parent = diffsync.pano.policy.device_groups[ids["parent"]] + pre_post = ids["pre_post"] + source = attrs["sourceaddressobjects"] + attrs["sourceaddressgroups"] + destination = attrs["destaddressobjects"] + attrs["destaddressgroups"] + service = attrs["destserviceobjects"] + attrs["destservicegroups"] + application = attrs["applications"] + attrs["applicationgroups"] + diffsync.pano.policy.create_security_rule( + parent, + pre_post, + name=ids["name"], + source=source if source else ["any"], + destination=destination if destination else ["any"], + service=service if service else ["any"], + application=application if application else ["any"], + tozone=[attrs["sourcezone"]] if attrs.get("sourcezone") else ["any"], + fromzone=[attrs["destzone"]] if attrs.get("destzone") else ["any"], + action=attrs["action"], + ) + return super().create(diffsync=diffsync, ids=ids, attrs=attrs) + + def update(self, attrs): + """Update Device in Panorama from PanoramaPolicyRule object.""" + parent = self.diffsync.pano.policy.device_groups[self.parent] + updates = {} + source = attrs.get("sourceaddressobjects", []) + attrs.get("sourceaddressgroups", []) + if source: + updates.update({"source": source}) + destination = attrs.get("destaddressobjects", []) + attrs.get("destaddressobjects", []) + if destination: + updates.update({"destination": destination}) + service = attrs.get("destserviceobjects", []) + attrs.get("destservicegroups", []) + if service: + updates.update({"service": service}) + application = attrs.get("applications", []) + attrs.get("applicationgroups", []) + if application: + updates.update({"application": application}) + if "sourcezone" in attrs: + updates.update({"fromzone": [attrs["sourcezone"]] if attrs.get("sourcezone") else ["any"]}) + if "destzone" in attrs: + updates.update({"tozone": [attrs["destzone"]] if attrs.get("destzone") else ["any"]}) + if attrs.get("action"): + updates.update({"action": attrs["action"]}) + 
self.diffsync.pano.policy.update_security_rule(parent, self.pre_post, name=self.name, **updates) + return super().update(attrs) + + def delete(self): + """Delete Device in Panorama from PanoramaPolicyRule object.""" + return self + + +class PanoramaPolicy(Policy): + """Panorama implementation of Policy DiffSync model.""" + + @classmethod + def create(cls, diffsync, ids, attrs): + """Create Device in Panorama from PanoramaPolicy object.""" + return super().create(diffsync=diffsync, ids=ids, attrs=attrs) + + def update(self, attrs): + """Update Device in Panorama from PanoramaPolicy object.""" + return super().update(attrs) + + def delete(self): + """Delete Device in Panorama from PanoramaPolicy object.""" + return self diff --git a/webinars/panorama-ssot/nautobot_ssot_panorama/filters.py b/webinars/panorama-ssot/nautobot_ssot_panorama/filters.py new file mode 100644 index 0000000..aed4ae3 --- /dev/null +++ b/webinars/panorama-ssot/nautobot_ssot_panorama/filters.py @@ -0,0 +1,57 @@ +"""Plugin filters.""" +from nautobot.utilities.filters import BaseFilterSet, SearchFilter + +from nautobot_ssot_panorama.models import VirtualSystem, LogicalGroup, ControlPlaneSystem + + +class ControlPlaneSystemFilterSet(BaseFilterSet): + """API filter for filtering ControlPlaneSystem objects.""" + + q = SearchFilter( + filter_predicates={ + "name": "icontains", + "system_id": "icontains", + }, + ) + + class Meta: + """Meta class.""" + + model = ControlPlaneSystem + fields = ["name", "device", "fqdn_or_ip"] + + +class VirtualSystemFilterSet(BaseFilterSet): + """API filter for filtering VirtualSystem objects.""" + + q = SearchFilter( + filter_predicates={ + "name": "icontains", + "system_id": "icontains", + }, + ) + + class Meta: + """Meta class.""" + + model = VirtualSystem + fields = [ + "name", + "system_id", + ] + + +class LogicalGroupFilterSet(BaseFilterSet): + """API filter for filtering LogicalGroup objects.""" + + q = SearchFilter( + filter_predicates={ + "name": "icontains", + }, 
    )

    class Meta:
        """Meta class."""

        model = LogicalGroup
        fields = ["name", "parent", "children"]
diff --git a/webinars/panorama-ssot/nautobot_ssot_panorama/forms.py b/webinars/panorama-ssot/nautobot_ssot_panorama/forms.py
new file mode 100644
index 0000000..cec3321
--- /dev/null
+++ b/webinars/panorama-ssot/nautobot_ssot_panorama/forms.py
@@ -0,0 +1,92 @@
"""Plugin forms."""
from django import forms

from nautobot.dcim.models import Device, Interface
from nautobot.extras.forms import NautobotModelForm
from nautobot.extras.models import SecretsGroup
from nautobot.utilities.forms import (
    BootstrapMixin,
    DynamicModelChoiceField,
    DynamicModelMultipleChoiceField,
)
from nautobot_firewall_models.models import Policy

from nautobot_ssot_panorama.models import VirtualSystem, LogicalGroup, ControlPlaneSystem


class ControlPlaneSystemFilterForm(BootstrapMixin, forms.Form):
    """Filtering/search form for `ControlPlaneSystem` objects."""

    model = ControlPlaneSystem
    q = forms.CharField(required=False, label="Search")
    # NOTE(review): max_length=20 is shorter than the model's name field --
    # confirm longer names do not need to be filterable.
    name = forms.CharField(max_length=20, required=False)
    device = DynamicModelChoiceField(queryset=Device.objects.all(), required=False)


class ControlPlaneSystemForm(NautobotModelForm):  # pylint: disable=too-many-ancestors
    """Generic create/update form for `ControlPlaneSystem` objects."""

    device = DynamicModelChoiceField(queryset=Device.objects.all(), required=False)
    secrets_group = DynamicModelChoiceField(
        queryset=SecretsGroup.objects.all(), required=True, label="Secrets Group (API user/pass)"
    )

    class Meta:
        """Meta class."""

        model = ControlPlaneSystem
        fields = ["name", "device", "port", "fqdn_or_ip", "verify_ssl", "secrets_group"]


class VirtualSystemFilterForm(BootstrapMixin, forms.Form):
    """Filtering/search form for `VirtualSystem` objects."""

    model = VirtualSystem
    q = forms.CharField(required=False, label="Search")
    name = forms.CharField(max_length=20, required=False)
    system_id = forms.IntegerField(required=False)
    device = DynamicModelChoiceField(queryset=Device.objects.all(), label="Parent Device", required=False)


class VirtualSystemForm(NautobotModelForm):  # pylint: disable=too-many-ancestors
    """Generic create/update form for `VirtualSystem` objects."""

    device = DynamicModelChoiceField(queryset=Device.objects.all(), label="Parent Device", required=True)
    # Interface choices are narrowed to the selected parent device.
    interfaces = DynamicModelMultipleChoiceField(
        queryset=Interface.objects.all(),
        label="Assigned Interfaces",
        required=True,
        query_params={"device_id": "$device"},
    )

    class Meta:
        """Meta class."""

        model = VirtualSystem
        fields = ["name", "system_id", "device", "interfaces"]


class LogicalGroupFilterForm(BootstrapMixin, forms.Form):
    """Filtering/search form for `LogicalGroup` objects."""

    model = LogicalGroup
    q = forms.CharField(required=False, label="Search")
    name = forms.CharField(max_length=20, required=False)


class LogicalGroupForm(NautobotModelForm):  # pylint: disable=too-many-ancestors
    """Generic create/update form for `LogicalGroup` objects."""

    devices = DynamicModelMultipleChoiceField(queryset=Device.objects.all(), label="Assigned Devices", required=False)
    virtual_systems = DynamicModelMultipleChoiceField(
        queryset=VirtualSystem.objects.all(), label="Assigned Virtual Systems", required=False
    )
    control_plane = DynamicModelChoiceField(queryset=ControlPlaneSystem.objects.all(), required=False)
    pre_policy = DynamicModelChoiceField(queryset=Policy.objects.all(), required=False)
    post_policy = DynamicModelChoiceField(queryset=Policy.objects.all(), required=False)

    class Meta:
        """Meta class."""

        model = LogicalGroup
        fields = ["name", "parent", "devices", "virtual_systems", "control_plane", "pre_policy", "post_policy"]
diff --git a/webinars/panorama-ssot/nautobot_ssot_panorama/homepage.py b/webinars/panorama-ssot/nautobot_ssot_panorama/homepage.py
new file mode 100644
index 0000000..318f920
--- /dev/null
+++
b/webinars/panorama-ssot/nautobot_ssot_panorama/homepage.py
@@ -0,0 +1,37 @@
"""Adds plugin items to homepage."""
from nautobot.core.apps import HomePageItem, HomePagePanel

from nautobot_ssot_panorama.models import VirtualSystem, LogicalGroup, ControlPlaneSystem

# One "Security" panel with a link per plugin model.
# NOTE(review): all three items share weight=100, so their relative order
# within the panel is not guaranteed -- confirm this is intended.
layout = (
    HomePagePanel(
        weight=150,
        name="Security",
        items=(
            HomePageItem(
                name="Control Plane Systems",
                model=ControlPlaneSystem,
                weight=100,
                link="plugins:nautobot_ssot_panorama:controlplanesystem_list",
                description="Firewall Control Plane Systems",
                permissions=["nautobot_ssot_panorama.view_controlplanesystem"],
            ),
            HomePageItem(
                name="Virtual Systems",
                model=VirtualSystem,
                weight=100,
                link="plugins:nautobot_ssot_panorama:virtualsystem_list",
                description="Firewall Virtual Systems",
                permissions=["nautobot_ssot_panorama.view_virtualsystem"],
            ),
            HomePageItem(
                name="Logical Groups",
                model=LogicalGroup,
                weight=100,
                link="plugins:nautobot_ssot_panorama:logicalgroup_list",
                description="Firewall Logical Groups",
                permissions=["nautobot_ssot_panorama.view_logicalgroup"],
            ),
        ),
    ),
)
diff --git a/webinars/panorama-ssot/nautobot_ssot_panorama/jobs.py b/webinars/panorama-ssot/nautobot_ssot_panorama/jobs.py
new file mode 100644
index 0000000..de9b6a0
--- /dev/null
+++ b/webinars/panorama-ssot/nautobot_ssot_panorama/jobs.py
@@ -0,0 +1,256 @@
"""Jobs for Panorama SSoT integration."""

from diffsync import DiffSyncFlags
from django.templatetags.static import static
from django.urls import reverse
from nautobot.extras.jobs import BooleanVar, Job, ObjectVar
from nautobot_golden_config.models import ConfigCompliance, ComplianceRule, ComplianceFeature
from nautobot_ssot.jobs.base import DataSource, DataTarget, DataMapping

from nautobot_ssot_panorama.diffsync.adapters import panorama, nautobot
from nautobot_ssot_panorama.models import ControlPlaneSystem


name = "Panorama SSoT"  # pylint: disable=invalid-name


class
PanoramaDataSource(DataSource, Job): + """Panorama SSoT Data Source.""" + + debug = BooleanVar(description="Enable for more verbose debug logging", default=False) + panorama = ObjectVar(model=ControlPlaneSystem) + compliance = BooleanVar( + description="Run Golden Config Compliance (adapters may resync if not set to dry run).", default=True + ) + + def __init__(self): + """Initialize Panorama Data Source.""" + super().__init__() + self.diffsync_flags = ( + self.diffsync_flags | DiffSyncFlags.CONTINUE_ON_FAILURE + ) # | DiffSyncFlags.SKIP_UNMATCHED_DST + + class Meta: # pylint: disable=too-few-public-methods + """Meta data for Panorama.""" + + name = "Panorama to Nautobot" + data_source = "Panorama" + data_target = "Nautobot" + data_source_icon = static("nautobot_ssot_panorama/panorama.png") + description = "Sync information from Panorama to Nautobot" + + @classmethod + def config_information(cls): + """Dictionary describing the configuration of this DataSource.""" + return {} + + @classmethod + def data_mappings(cls): + """List describing the data mappings involved in this DataSource.""" + return ( + DataMapping( + "Address Object - Parent", + None, + "Address Object", + reverse("plugins:nautobot_firewall_models:addressobject_list"), + ), + DataMapping( + "Address Object - ip-range", None, "IP Range", reverse("plugins:nautobot_firewall_models:iprange_list") + ), + DataMapping("Address Object - fqdn", None, "FQDN", reverse("plugins:nautobot_firewall_models:fqdn_list")), + DataMapping("Address Object - net-mask (Prefix)", None, "Prefix", reverse("ipam:prefix_list")), + DataMapping("Address Object - net-mask (Host)", None, "IP Address", reverse("ipam:ipaddress_list")), + DataMapping("Address Object - ip-wildcard", None, "Not Supported", None), + DataMapping( + "Address Object Group", + None, + "Address Object Group", + reverse("plugins:nautobot_firewall_models:addressobjectgroup_list"), + ), + DataMapping( + "Dynamic Address Object Group", + None, + "Address Object 
Group + Custom Field", + reverse("plugins:nautobot_firewall_models:addressobjectgroup_list"), + ), + DataMapping( + "Service Object", None, "Service Object", reverse("plugins:nautobot_firewall_models:serviceobject_list") + ), + DataMapping( + "Service Object Group", + None, + "Service Object Group", + reverse("plugins:nautobot_firewall_models:serviceobjectgroup_list"), + ), + DataMapping( + "Application Object", + None, + "Application Object", + reverse("plugins:nautobot_firewall_models:applicationobject_list"), + ), + DataMapping( + "Application Container", + None, + "Application Object + Relationship", + reverse("plugins:nautobot_firewall_models:applicationobject_list"), + ), + DataMapping( + "Application Object Group", + None, + "Application Object Group", + reverse("plugins:nautobot_firewall_models:applicationobjectgroup_list"), + ), + DataMapping( + "Dynamic User Object Group", + None, + "User Object Group + Custom Field", + reverse("plugins:nautobot_firewall_models:applicationobjectgroup_list"), + ), + DataMapping("Zone", None, "Zone", reverse("plugins:nautobot_firewall_models:zone_list")), + DataMapping("Rule", None, "Policy Rule", reverse("plugins:nautobot_firewall_models:policyrule_list")), + DataMapping("Policy", None, "Policy", reverse("plugins:nautobot_firewall_models:policy_list")), + DataMapping( + "Device Group", None, "Logical Group", reverse("plugins:nautobot_ssot_panorama:logicalgroup_list") + ), + DataMapping("VSYS", None, "Virtual System", reverse("plugins:nautobot_ssot_panorama:virtualsystem_list")), + DataMapping("Firewall", None, "Device", reverse("dcim:device_list")), + DataMapping( + "Panorama", + None, + "Control Plane System", + reverse("plugins:nautobot_ssot_panorama:controlplanesystem_list"), + ), + ) + + def load_source_adapter(self): + """Load data from Panorama into DiffSync models.""" + self.source_adapter = panorama.PanoramaAdapter(job=self, sync=self.sync, pan=self.kwargs["panorama"]) + self.source_adapter.load() + 
self.source_value = self.source_adapter.dict() + + def load_target_adapter(self): + """Load data from Nautobot into DiffSync models.""" + self.target_adapter = nautobot.NautobotAdapter(job=self, sync=self.sync) + self.target_adapter.load() + + def post_run(self): + """Overloaded to add compliance.""" + if self.kwargs["compliance"]: + if not self.kwargs["dry_run"]: + self.target_adapter = nautobot.NautobotAdapter(job=self, sync=self.sync) + self.target_adapter.load() + nautobot_adpt = self.target_adapter.dict() + pan_adpt = self.source_adapter.dict() + device = self.kwargs["panorama"].device + for model in [ + "addressobject", + "addressgroup", + "application", + "applicationgroup", + "devicegroup", + "userobjectgroup", + "firewall", + "policy", + "policyrule", + "serviceobject", + "servicegroup", + "vsys", + "zone", + ]: + rule = ComplianceRule.objects.get(feature__slug=model) + intended = nautobot_adpt.get(model, {}) + actual = pan_adpt.get(model, {}) + try: + comp_obj = ConfigCompliance.objects.get(device=device, rule=rule) + comp_obj.intended = intended + comp_obj.actual = actual + comp_obj.validated_save() + except ConfigCompliance.DoesNotExist: + ConfigCompliance.objects.create(device=device, rule=rule, intended=intended, actual=actual) + + +# TODO: Implement this. 
+class PanoramaDataTarget(DataTarget, Job): + """Panorama SSoT Data Target.""" + + debug = BooleanVar(description="Enable for more verbose debug logging", default=False) + panorama = ObjectVar(model=ControlPlaneSystem) + compliance = BooleanVar( + description="Run Golden Config Compliance (adapters may resync if not set to dry run).", default=True + ) + + def __init__(self): + """Initialize Panorama Data Target.""" + super().__init__() + self.diffsync_flags = ( + self.diffsync_flags | DiffSyncFlags.CONTINUE_ON_FAILURE + ) # | DiffSyncFlags.SKIP_UNMATCHED_DST + + class Meta: # pylint: disable=too-few-public-methods + """Meta data for Panorama.""" + + name = "Nautobot to Panorama" + data_source = "Nautobot" + data_target = "Panorama" + data_target_icon = static("nautobot_ssot_panorama/panorama.png") + description = "Sync information from Nautobot to Panorama" + + @classmethod + def config_information(cls): + """Dictionary describing the configuration of this DataTarget.""" + return {} + + @classmethod + def data_mappings(cls): + """List describing the data mappings involved in this DataSource.""" + return () + + def load_source_adapter(self): + """Load data from Nautobot into DiffSync models.""" + self.source_adapter = nautobot.NautobotAdapter(job=self, sync=self.sync) + self.source_adapter.load() + self.source_value = self.source_adapter.dict() + + def load_target_adapter(self): + """Load data from Panorama into DiffSync models.""" + try: + self.target_adapter = panorama.PanoramaAdapter(job=self, sync=self.sync, pan=self.kwargs["panorama"]) + self.target_adapter.load() + except: + self.log_failure(self.kwargs["panorama"], "Authentication Error, please validate credentials.") + + def post_run(self): + """Overloaded to add compliance.""" + if self.kwargs["compliance"]: + if not self.kwargs["dry_run"]: + self.target_adapter = panorama.PanoramaAdapter(job=self, sync=self.sync, pan=self.kwargs["panorama"]) + self.target_adapter.load() + nautobot_adpt = 
self.source_adapter.dict() + pan_adpt = self.target_adapter.dict() + device = self.kwargs["panorama"].device + for model in [ + "zone", + "policyrule", + "userobjectgroup", + "firewall", + "addressgroup", + "vsys", + "addressobject", + "devicegroup", + "application", + "serviceobject", + "policy", + ]: + rule = ComplianceRule.objects.get(feature__slug=model) + intended = nautobot_adpt.get(model, {}) + actual = pan_adpt.get(model, {}) + try: + comp_obj = ConfigCompliance.objects.get(device=device, rule=rule) + comp_obj.intended = intended + comp_obj.actual = actual + comp_obj.validated_save() + except ConfigCompliance.DoesNotExist: + ConfigCompliance.objects.create(device=device, rule=rule, intended=intended, actual=actual) + + +jobs = [PanoramaDataSource, PanoramaDataTarget] diff --git a/webinars/panorama-ssot/nautobot_ssot_panorama/migrations/0001_initial.py b/webinars/panorama-ssot/nautobot_ssot_panorama/migrations/0001_initial.py new file mode 100644 index 0000000..b347452 --- /dev/null +++ b/webinars/panorama-ssot/nautobot_ssot_panorama/migrations/0001_initial.py @@ -0,0 +1,282 @@ +# Generated by Django 3.2.16 on 2022-12-11 23:52 + +import django.core.serializers.json +from django.db import migrations, models +import django.db.models.deletion +import nautobot.extras.models.mixins +import taggit.managers +import uuid + + +class Migration(migrations.Migration): + + initial = True + + dependencies = [ + ("nautobot_firewall_models", "0014_custom_status_application"), + ("dcim", "0019_device_redundancy_group_data_migration"), + ("extras", "0053_relationship_required_on"), + ] + + operations = [ + migrations.CreateModel( + name="ControlPlaneSystem", + fields=[ + ( + "id", + models.UUIDField( + default=uuid.uuid4, editable=False, primary_key=True, serialize=False, unique=True + ), + ), + ("created", models.DateField(auto_now_add=True, null=True)), + ("last_updated", models.DateTimeField(auto_now=True, null=True)), + ( + "_custom_field_data", + 
models.JSONField(blank=True, default=dict, encoder=django.core.serializers.json.DjangoJSONEncoder), + ), + ("name", models.CharField(max_length=100)), + ("verify_ssl", models.BooleanField(default=True)), + ("port", models.PositiveSmallIntegerField(default=443)), + ("fqdn_or_ip", models.CharField(max_length=100)), + ( + "device", + models.OneToOneField( + null=True, + on_delete=django.db.models.deletion.CASCADE, + related_name="panorama", + to="dcim.device", + ), + ), + ( + "secrets_group", + models.ForeignKey( + blank=True, + default=None, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to="extras.secretsgroup", + ), + ), + ("tags", taggit.managers.TaggableManager(through="extras.TaggedItem", to="extras.Tag")), + ], + options={ + "verbose_name": "Control Plane System", + "verbose_name_plural": "Control Plane Systems", + "ordering": ["name"], + }, + bases=( + models.Model, + nautobot.extras.models.mixins.DynamicGroupMixin, + nautobot.extras.models.mixins.NotesMixin, + ), + ), + migrations.CreateModel( + name="LogicalGroup", + fields=[ + ( + "id", + models.UUIDField( + default=uuid.uuid4, editable=False, primary_key=True, serialize=False, unique=True + ), + ), + ("created", models.DateField(auto_now_add=True, null=True)), + ("last_updated", models.DateTimeField(auto_now=True, null=True)), + ( + "_custom_field_data", + models.JSONField(blank=True, default=dict, encoder=django.core.serializers.json.DjangoJSONEncoder), + ), + ("name", models.CharField(max_length=48)), + ( + "control_plane", + models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.CASCADE, + related_name="logical_groups", + to="nautobot_ssot_panorama.controlplanesystem", + ), + ), + ], + options={ + "verbose_name": "Logical Group", + "verbose_name_plural": "Logical Groups", + "ordering": ["name"], + }, + bases=( + models.Model, + nautobot.extras.models.mixins.DynamicGroupMixin, + nautobot.extras.models.mixins.NotesMixin, + ), + ), + migrations.CreateModel( + 
name="VirtualSystem", + fields=[ + ( + "id", + models.UUIDField( + default=uuid.uuid4, editable=False, primary_key=True, serialize=False, unique=True + ), + ), + ("created", models.DateField(auto_now_add=True, null=True)), + ("last_updated", models.DateTimeField(auto_now=True, null=True)), + ( + "_custom_field_data", + models.JSONField(blank=True, default=dict, encoder=django.core.serializers.json.DjangoJSONEncoder), + ), + ("system_id", models.PositiveSmallIntegerField()), + ("name", models.CharField(max_length=48)), + ( + "device", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, related_name="vsys", to="dcim.device" + ), + ), + ], + options={ + "verbose_name": "Virtual System", + "verbose_name_plural": "Virtual Systems", + "ordering": ["name"], + }, + bases=( + models.Model, + nautobot.extras.models.mixins.DynamicGroupMixin, + nautobot.extras.models.mixins.NotesMixin, + ), + ), + migrations.CreateModel( + name="VirtualSystemAssociation", + fields=[ + ( + "id", + models.UUIDField( + default=uuid.uuid4, editable=False, primary_key=True, serialize=False, unique=True + ), + ), + ("iface", models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to="dcim.interface")), + ( + "vsys", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, to="nautobot_ssot_panorama.virtualsystem" + ), + ), + ], + options={ + "abstract": False, + }, + ), + migrations.AddField( + model_name="virtualsystem", + name="interfaces", + field=models.ManyToManyField( + related_name="assigned_vsys", + through="nautobot_ssot_panorama.VirtualSystemAssociation", + to="dcim.Interface", + ), + ), + migrations.AddField( + model_name="virtualsystem", + name="tags", + field=taggit.managers.TaggableManager(through="extras.TaggedItem", to="extras.Tag"), + ), + migrations.CreateModel( + name="LogicalGroupToVirtualSystem", + fields=[ + ( + "id", + models.UUIDField( + default=uuid.uuid4, editable=False, primary_key=True, serialize=False, unique=True + ), + ), + ( + 
"group", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, to="nautobot_ssot_panorama.logicalgroup" + ), + ), + ( + "vsys", + models.OneToOneField( + on_delete=django.db.models.deletion.CASCADE, to="nautobot_ssot_panorama.virtualsystem" + ), + ), + ], + options={ + "abstract": False, + }, + ), + migrations.CreateModel( + name="LogicalGroupToDevice", + fields=[ + ( + "id", + models.UUIDField( + default=uuid.uuid4, editable=False, primary_key=True, serialize=False, unique=True + ), + ), + ("device", models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to="dcim.device")), + ( + "group", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, to="nautobot_ssot_panorama.logicalgroup" + ), + ), + ], + options={ + "abstract": False, + }, + ), + migrations.AddField( + model_name="logicalgroup", + name="devices", + field=models.ManyToManyField( + related_name="logical_group", through="nautobot_ssot_panorama.LogicalGroupToDevice", to="dcim.Device" + ), + ), + migrations.AddField( + model_name="logicalgroup", + name="parent", + field=models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.CASCADE, + related_name="children", + to="nautobot_ssot_panorama.logicalgroup", + ), + ), + migrations.AddField( + model_name="logicalgroup", + name="post_policy", + field=models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + related_name="post_policy", + to="nautobot_firewall_models.policy", + ), + ), + migrations.AddField( + model_name="logicalgroup", + name="pre_policy", + field=models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + related_name="pre_policy", + to="nautobot_firewall_models.policy", + ), + ), + migrations.AddField( + model_name="logicalgroup", + name="tags", + field=taggit.managers.TaggableManager(through="extras.TaggedItem", to="extras.Tag"), + ), + migrations.AddField( + model_name="logicalgroup", + 
name="virtual_systems", + field=models.ManyToManyField( + related_name="logical_group", + through="nautobot_ssot_panorama.LogicalGroupToVirtualSystem", + to="nautobot_ssot_panorama.VirtualSystem", + ), + ), + ] diff --git a/webinars/panorama-ssot/nautobot_ssot_panorama/migrations/__init__.py b/webinars/panorama-ssot/nautobot_ssot_panorama/migrations/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/webinars/panorama-ssot/nautobot_ssot_panorama/models.py b/webinars/panorama-ssot/nautobot_ssot_panorama/models.py new file mode 100644 index 0000000..9b6c4b5 --- /dev/null +++ b/webinars/panorama-ssot/nautobot_ssot_panorama/models.py @@ -0,0 +1,166 @@ +"""Models extending the Firewall plugin.""" +from django.db import models +from django.urls import reverse +from nautobot.core.models import BaseModel +from nautobot.core.models.generics import PrimaryModel +from nautobot.extras.utils import extras_features +from nautobot.utilities.tree_queries import TreeManager +from tree_queries.models import TreeNode + + +@extras_features( + "custom_fields", + "custom_links", + "custom_validators", + "export_templates", + "graphql", + "relationships", + "statuses", + "webhooks", +) +class ControlPlaneSystem(PrimaryModel): # pylint: disable=too-many-ancestors + """Models Palo Alto Panorama.""" + + name = models.CharField(max_length=100) + device = models.OneToOneField(to="dcim.Device", null=True, related_name="panorama", on_delete=models.CASCADE) + verify_ssl = models.BooleanField(default=True, verbose_name="Verify SSL") + port = models.PositiveSmallIntegerField(default=443) + fqdn_or_ip = models.CharField(max_length=100, verbose_name="FQDN/IP") + secrets_group = models.ForeignKey( + to="extras.SecretsGroup", + on_delete=models.SET_NULL, + default=None, + blank=True, + null=True, + verbose_name="Secrets Group (API user/pass)", + ) + + class Meta: + """Meta class.""" + + ordering = ["name"] + verbose_name = "Control Plane System" + verbose_name_plural = "Control Plane 
Systems" + + def get_absolute_url(self): + """Return detail view URL.""" + return reverse("plugins:nautobot_ssot_panorama:controlplanesystem", args=[self.pk]) + + def __str__(self): + """Stringify instance.""" + return self.name + + +@extras_features( + "custom_fields", + "custom_links", + "custom_validators", + "export_templates", + "graphql", + "relationships", + "statuses", + "webhooks", +) +class VirtualSystem(PrimaryModel): # pylint: disable=too-many-ancestors + """Models Palo Alto VSYS.""" + + system_id = models.PositiveSmallIntegerField() + name = models.CharField(max_length=48) + device = models.ForeignKey(to="dcim.Device", related_name="vsys", on_delete=models.CASCADE) + interfaces = models.ManyToManyField( + to="dcim.Interface", related_name="assigned_vsys", through="VirtualSystemAssociation" + ) + + class Meta: + """Meta class.""" + + ordering = ["name"] + verbose_name = "Virtual System" + verbose_name_plural = "Virtual Systems" + + def get_absolute_url(self): + """Return detail view URL.""" + return reverse("plugins:nautobot_ssot_panorama:virtualsystem", args=[self.pk]) + + def __str__(self): + """Stringify instance.""" + return self.name + + +class VirtualSystemAssociation(BaseModel): + """Enforce an interface is not assigned more than once.""" + + vsys = models.ForeignKey("nautobot_ssot_panorama.VirtualSystem", on_delete=models.CASCADE) + iface = models.OneToOneField("dcim.Interface", on_delete=models.CASCADE) + + +@extras_features( + "custom_fields", + "custom_links", + "custom_validators", + "export_templates", + "graphql", + "relationships", + "statuses", + "webhooks", +) +class LogicalGroup(TreeNode, PrimaryModel): # pylint: disable=too-many-ancestors + """Logical grouping of Devices & VirtualSystems.""" + + name = models.CharField(max_length=48) + devices = models.ManyToManyField(to="dcim.Device", related_name="logical_group", through="LogicalGroupToDevice") + virtual_systems = models.ManyToManyField( + to="nautobot_ssot_panorama.VirtualSystem", 
related_name="logical_group", through="LogicalGroupToVirtualSystem" + ) + control_plane = models.ForeignKey( + to="nautobot_ssot_panorama.ControlPlaneSystem", + null=True, + blank=True, + related_name="logical_groups", + on_delete=models.CASCADE, + ) + pre_policy = models.ForeignKey( + to="nautobot_firewall_models.Policy", + null=True, + blank=True, + related_name="pre_policy", + on_delete=models.SET_NULL, + ) + post_policy = models.ForeignKey( + to="nautobot_firewall_models.Policy", + null=True, + blank=True, + related_name="post_policy", + on_delete=models.SET_NULL, + ) + + objects = TreeManager() + + class Meta: + """Meta class.""" + + ordering = ["name"] + verbose_name = "Logical Group" + verbose_name_plural = "Logical Groups" + + def get_absolute_url(self): + """Return detail view URL.""" + return reverse("plugins:nautobot_ssot_panorama:logicalgroup", args=[self.pk]) + + def __str__(self): + """Stringify instance.""" + return self.name + + +class LogicalGroupToDevice(BaseModel): + """Enforce a Device is not assigned more than once.""" + + group = models.ForeignKey("nautobot_ssot_panorama.LogicalGroup", on_delete=models.CASCADE) + device = models.OneToOneField("dcim.Device", on_delete=models.CASCADE) + + +class LogicalGroupToVirtualSystem(BaseModel): + """Enforce a VirtualSystem is not assigned more than once.""" + + group = models.ForeignKey("nautobot_ssot_panorama.LogicalGroup", on_delete=models.CASCADE) + vsys = models.OneToOneField("nautobot_ssot_panorama.VirtualSystem", on_delete=models.CASCADE) diff --git a/webinars/panorama-ssot/nautobot_ssot_panorama/navigation.py b/webinars/panorama-ssot/nautobot_ssot_panorama/navigation.py new file mode 100644 index 0000000..294d677 --- /dev/null +++ b/webinars/panorama-ssot/nautobot_ssot_panorama/navigation.py @@ -0,0 +1,51 @@ +"""Menu items.""" + +from nautobot.core.apps import NavMenuAddButton, NavMenuGroup, NavMenuItem, NavMenuTab + +menu_items = ( + NavMenuTab( + name="Security", + # weight=150, + groups=[ + 
NavMenuGroup( + name="Firewall", + weight=300, + items=[ + NavMenuItem( + link="plugins:nautobot_ssot_panorama:controlplanesystem_list", + name="Control Plane Systems", + permissions=["nautobot_ssot_panorama.view_controlplanesystem"], + buttons=[ + NavMenuAddButton( + link="plugins:nautobot_ssot_panorama:controlplanesystem_add", + permissions=["nautobot_ssot_panorama.add_controlplanesystem"], + ), + ], + ), + NavMenuItem( + link="plugins:nautobot_ssot_panorama:virtualsystem_list", + name="Virtual Systems", + permissions=["nautobot_ssot_panorama.view_virtualsystem"], + buttons=[ + NavMenuAddButton( + link="plugins:nautobot_ssot_panorama:virtualsystem_add", + permissions=["nautobot_ssot_panorama.add_virtualsystem"], + ), + ], + ), + NavMenuItem( + link="plugins:nautobot_ssot_panorama:logicalgroup_list", + name="Logical Groups", + permissions=["nautobot_ssot_panorama.view_logicalgroup"], + buttons=[ + NavMenuAddButton( + link="plugins:nautobot_ssot_panorama:logicalgroup_add", + permissions=["nautobot_ssot_panorama.add_logicalgroup"], + ), + ], + ), + ], + ), + ], + ), +) diff --git a/webinars/panorama-ssot/nautobot_ssot_panorama/signals.py b/webinars/panorama-ssot/nautobot_ssot_panorama/signals.py new file mode 100644 index 0000000..da07e14 --- /dev/null +++ b/webinars/panorama-ssot/nautobot_ssot_panorama/signals.py @@ -0,0 +1,140 @@ +# pylint: disable=invalid-name +"""Nautobot signal handler functions for panorama_sync.""" +import os + +from django.apps import apps as global_apps +from nautobot.extras.choices import CustomFieldTypeChoices, RelationshipTypeChoices +from nautobot.core.settings_funcs import is_truthy + + +def nautobot_database_ready_callback(apps=global_apps, **kwargs): # pylint: disable=too-many-locals + """Callback function for post_migrate() -- create CustomField & Relationship records.""" + CustomField = apps.get_model("extras", "CustomField") + ContentType = apps.get_model("contenttypes", "ContentType") + Relationship = apps.get_model("extras", 
"Relationship") + Site = apps.get_model("dcim", "Site") + Device = apps.get_model("dcim", "Device") + DeviceType = apps.get_model("dcim", "DeviceType") + DeviceRole = apps.get_model("dcim", "DeviceRole") + Manufacturer = apps.get_model("dcim", "Manufacturer") + Platform = apps.get_model("dcim", "Platform") + Status = apps.get_model("extras", "Status") + Secret = apps.get_model("extras", "Secret") + Job = apps.get_model("extras", "Job") + SecretsGroup = apps.get_model("extras", "SecretsGroup") + SecretsGroupAssociation = apps.get_model("extras", "SecretsGroupAssociation") + ComplianceFeature = apps.get_model("nautobot_golden_config", "ComplianceFeature") + ComplianceRule = apps.get_model("nautobot_golden_config", "ComplianceRule") + AddressObjectGroup = apps.get_model("nautobot_firewall_models", "AddressObjectGroup") + Application = apps.get_model("nautobot_firewall_models", "ApplicationObject") + ControlPlaneSystem = apps.get_model("nautobot_ssot_panorama", "ControlPlaneSystem") + site, _ = Site.objects.get_or_create( + name="Panorama Staging", slug="panorama-staging", status=Status.objects.get(slug="staging") + ) + palo, _ = Manufacturer.objects.get_or_create(name="Palo Alto", slug="palo-alto") + platform, _ = Platform.objects.get_or_create(name="Palo Alto Panos", slug="paloalto-panos", manufacturer=palo) + device_role, _ = DeviceRole.objects.get_or_create(name="Panorama Staging", slug="panorama-staging") + device_type, _ = DeviceType.objects.get_or_create( + model="Panorama Staging", slug="panorama-staging", manufacturer=palo + ) + Job.objects.all().update(enabled=True) + + panorama, _ = Device.objects.get_or_create( + name="NTC Demo Panorama", + device_role=device_role, + device_type=device_type, + site=site, + status=Status.objects.get(slug="active"), + platform=platform, + ) + pano_user, _ = Secret.objects.get_or_create( + name="Panorama Username", + slug="panorama-username", + provider="environment-variable", + parameters={"variable": 
"NAUTOBOT_PANORAMA_USER"}, + ) + pano_pass, _ = Secret.objects.get_or_create( + name="Panorama Password", + slug="panorama-password", + provider="environment-variable", + parameters={"variable": "NAUTOBOT_PANORAMA_PWD"}, + ) + secret_group, _ = SecretsGroup.objects.get_or_create( + name="NTC Demo Panorama Credentials", slug="ntc-demo-panorama-credentials" + ) + SecretsGroupAssociation.objects.get_or_create( + access_type="HTTP(S)", secret_type="password", group_id=secret_group.id, secret_id=pano_pass.id + ) + SecretsGroupAssociation.objects.get_or_create( + access_type="HTTP(S)", secret_type="username", group_id=secret_group.id, secret_id=pano_user.id + ) + ControlPlaneSystem.objects.get_or_create( + name="NTC Demo Panorama", + verify_ssl=is_truthy(os.getenv("NAUTOBOT_PANORAMA_VERIFY", True)), + port=int(os.getenv("NAUTOBOT_PANORAMA_PORT", 443)), + device=panorama, + fqdn_or_ip=os.getenv("NAUTOBOT_PANORAMA_URL"), + secrets_group=secret_group, + ) + + for name, slug in { + "Address Objects": "addressobject", + "Address Groups": "addressgroup", + "Application Objects": "application", + "Application Groups": "applicationgroup", + "Device Groups": "devicegroup", + "Dynamic User Groups": "userobjectgroup", + "Firewall Settings": "firewall", + "Policies": "policy", + "Policy Rules": "policyrule", + "Serivce Objects": "serviceobject", + "Service Groups": "servicegroup", + "Vsys Settings": "vsys", + "Zones": "zone", + }.items(): + feature, _ = ComplianceFeature.objects.get_or_create(name=name, slug=slug) + ComplianceRule.objects.get_or_create( + platform=platform, feature=feature, config_type="json", config_ordered=False + ) + + custom_field, _ = CustomField.objects.get_or_create( + type=CustomFieldTypeChoices.TYPE_TEXT, + name="group-type", + slug="group-type", + defaults={ + "label": "ObjectGroup is static or dynamic", + }, + ) + custom_field.content_types.set([ContentType.objects.get_for_model(AddressObjectGroup)]) + + custom_field, _ = 
CustomField.objects.get_or_create( + type=CustomFieldTypeChoices.TYPE_TEXT, + name="application-type", + slug="application-type", + defaults={ + "label": "Application is object or container", + }, + ) + custom_field.content_types.set([ContentType.objects.get_for_model(Application)]) + + custom_field, _ = CustomField.objects.get_or_create( + type=CustomFieldTypeChoices.TYPE_TEXT, + name="dynamic-address-group-filter", + slug="dynamic-address-group-filter", + defaults={ + "label": "Dynamic AddressObjectGroup filter", + }, + ) + custom_field.content_types.set([ContentType.objects.get_for_model(AddressObjectGroup)]) + + # add Application -> Application Relationship + relationship_dict = { + "name": "Application Container", + "slug": "application_container", + "type": RelationshipTypeChoices.TYPE_MANY_TO_MANY_SYMMETRIC, + "source_type": ContentType.objects.get_for_model(Application), + "source_label": "Container Application Object", + "destination_type": ContentType.objects.get_for_model(Application), + "destination_label": "Child Applications", + } + Relationship.objects.get_or_create(name=relationship_dict["name"], defaults=relationship_dict) diff --git a/webinars/panorama-ssot/nautobot_ssot_panorama/static/nautobot_ssot_panorama/panorama.png b/webinars/panorama-ssot/nautobot_ssot_panorama/static/nautobot_ssot_panorama/panorama.png new file mode 100644 index 0000000..036a022 Binary files /dev/null and b/webinars/panorama-ssot/nautobot_ssot_panorama/static/nautobot_ssot_panorama/panorama.png differ diff --git a/webinars/panorama-ssot/nautobot_ssot_panorama/tables.py b/webinars/panorama-ssot/nautobot_ssot_panorama/tables.py new file mode 100644 index 0000000..7801ae4 --- /dev/null +++ b/webinars/panorama-ssot/nautobot_ssot_panorama/tables.py @@ -0,0 +1,61 @@ +"""Plugin tables.""" +import django_tables2 as tables + +from nautobot.utilities.tables import ( + BaseTable, + ButtonsColumn, + ToggleColumn, +) + +from nautobot_ssot_panorama.models import VirtualSystem, 
LogicalGroup, ControlPlaneSystem + + +class ControlPlaneSystemTable(BaseTable): + """Table for list view of `ControlPlaneSystem` objects.""" + + pk = ToggleColumn() + name = tables.LinkColumn() + actions = ButtonsColumn(ControlPlaneSystem) + verify_ssl = tables.BooleanColumn(verbose_name="Verify SSL") + device = tables.LinkColumn() + fqdn_or_ip = tables.Column(verbose_name="FQDN/IP") + + class Meta(BaseTable.Meta): # pylint: disable=too-few-public-methods + """Meta class.""" + + model = ControlPlaneSystem + fields = ["pk", "name", "device", "verify_ssl", "port", "fqdn_or_ip"] + + +class VirtualSystemTable(BaseTable): + """Table for list view of `VirtualSystem` objects.""" + + pk = ToggleColumn() + name = tables.LinkColumn() + actions = ButtonsColumn(VirtualSystem) + system_id = tables.Column(verbose_name="System ID") + device = tables.LinkColumn() + interfaces = tables.ManyToManyColumn(linkify_item=True) + + class Meta(BaseTable.Meta): # pylint: disable=too-few-public-methods + """Meta class.""" + + model = VirtualSystem + fields = ["pk", "name", "system_id", "device", "interfaces"] + + +class LogicalGroupTable(BaseTable): + """Table for list view of `LogicalGroup` objects.""" + + pk = ToggleColumn() + name = tables.LinkColumn() + actions = ButtonsColumn(LogicalGroup) + parent = tables.LinkColumn() + devices = tables.ManyToManyColumn(linkify_item=True) + virtual_systems = tables.ManyToManyColumn(linkify_item=True) + + class Meta(BaseTable.Meta): # pylint: disable=too-few-public-methods + """Meta class.""" + + model = LogicalGroup + fields = ["pk", "name", "parent", "devices", "virtual_systems"] diff --git a/webinars/panorama-ssot/nautobot_ssot_panorama/template_content.py b/webinars/panorama-ssot/nautobot_ssot_panorama/template_content.py new file mode 100644 index 0000000..a0f66ab --- /dev/null +++ b/webinars/panorama-ssot/nautobot_ssot_panorama/template_content.py @@ -0,0 +1,29 @@ +"""Extensions of baseline Nautobot views.""" +from django.urls import reverse 
+from nautobot.extras.plugins import PluginTemplateExtension + + +class DeviceExtensions(PluginTemplateExtension): # pylint: disable=abstract-method + """Add VirtualSystem & LogicalGroup to the tabs on the Device page.""" + + model = "dcim.device" + + def detail_tabs(self): + """Add tabs to the Devices detail view.""" + return [ + { + "title": "Virtual Systems", + "url": reverse( + "plugins:nautobot_ssot_panorama:virtualsystem_device_tab", kwargs={"pk": self.context["object"].pk} + ), + }, + { + "title": "Logical Group", + "url": reverse( + "plugins:nautobot_ssot_panorama:logicalgroup_device_tab", kwargs={"pk": self.context["object"].pk} + ), + }, + ] + + +template_extensions = [DeviceExtensions] diff --git a/webinars/panorama-ssot/nautobot_ssot_panorama/templates/nautobot_ssot_panorama/controlplanesystem_retrieve.html b/webinars/panorama-ssot/nautobot_ssot_panorama/templates/nautobot_ssot_panorama/controlplanesystem_retrieve.html new file mode 100644 index 0000000..d5857bd --- /dev/null +++ b/webinars/panorama-ssot/nautobot_ssot_panorama/templates/nautobot_ssot_panorama/controlplanesystem_retrieve.html @@ -0,0 +1,17 @@ +{% extends 'generic/object_detail.html' %} +{% load helpers %} + +{% block content_left_page %} +
+
+ Control Plane System +
+ + + + + + +
FQDN/IP{{ object.fqdn_or_ip }}
Verify SSL{{ object.verify_ssl|render_boolean }}
Port{{ object.port }}
Device{% if object.device %}{{ object.device.name }}{% else %}{{ None|placeholder}}{% endif %}
Secrets Group{{ object.secrets_group.name }}
+
+{% endblock %} \ No newline at end of file diff --git a/webinars/panorama-ssot/nautobot_ssot_panorama/templates/nautobot_ssot_panorama/device_logical_groups.html b/webinars/panorama-ssot/nautobot_ssot_panorama/templates/nautobot_ssot_panorama/device_logical_groups.html new file mode 100644 index 0000000..be3751f --- /dev/null +++ b/webinars/panorama-ssot/nautobot_ssot_panorama/templates/nautobot_ssot_panorama/device_logical_groups.html @@ -0,0 +1,34 @@ +{% extends 'dcim/device.html' %} +{% load tree_helpers %} + +{% block content %} +
+
+
+
Logical Group
+ + + + + + {% for group in object.logical_group.first.ancestors %} + + + + + {% endfor %} + + {% if object.logical_group.exists %} + + + {% else %} + + + {% endif %} + +
NameRelationship
{{ group.ancestors.count|tree_position|safe }}{{ group.name }}Ancestor
{{ object.logical_group.first.ancestors.count|tree_position|safe }}{{ object.logical_group.first.name }}Directly AssignedNot Assigned To A Logical Group
+
+
+
+
+{% endblock content %} diff --git a/webinars/panorama-ssot/nautobot_ssot_panorama/templates/nautobot_ssot_panorama/device_virtual_systems.html b/webinars/panorama-ssot/nautobot_ssot_panorama/templates/nautobot_ssot_panorama/device_virtual_systems.html new file mode 100644 index 0000000..734146d --- /dev/null +++ b/webinars/panorama-ssot/nautobot_ssot_panorama/templates/nautobot_ssot_panorama/device_virtual_systems.html @@ -0,0 +1,41 @@ +{% extends 'dcim/device.html' %} + +{% block content %} +
+
+
+
Virtual Systems
+ + + + + + + + {% for vsys in object.vsys.all|dictsort:"system_id" %} + + + + + + + {% endfor %} +
System IDNameInterfaceLogical Group
{{ vsys.system_id }}{{ vsys.name }} + {% for iface in vsys.interfaces.all %} + {{ iface.name }}
+ {% endfor %} +
+ {% if vsys.logical_group.exists %}{{ vsys.logical_group.first.name }}{% else %}{% endif %} +
+
+
+
+
+ {% if perms.dcim.change_device and perms.nautobot_ssot_panorama.add_virtualsystem %} + + Add Virtual System + + {% endif %} +
+
+{% endblock content %} diff --git a/webinars/panorama-ssot/nautobot_ssot_panorama/templatetags/__init__.py b/webinars/panorama-ssot/nautobot_ssot_panorama/templatetags/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/webinars/panorama-ssot/nautobot_ssot_panorama/templatetags/tree_helpers.py b/webinars/panorama-ssot/nautobot_ssot_panorama/templatetags/tree_helpers.py new file mode 100644 index 0000000..156032a --- /dev/null +++ b/webinars/panorama-ssot/nautobot_ssot_panorama/templatetags/tree_helpers.py @@ -0,0 +1,23 @@ +"""Plugin template tags.""" +from django import template +from django_jinja import library + + +register = template.Library() + + +@library.filter() +@register.filter() +def tree_position(value: int) -> str: + """Used for rendering nested group position. + + Args: + value (int): the index of the nesting + + Returns: + str: table row string + """ + if value == 0: + return "" + value = value - 1 + return " " * 4 * value + "↳ " diff --git a/webinars/panorama-ssot/nautobot_ssot_panorama/tests/__init__.py b/webinars/panorama-ssot/nautobot_ssot_panorama/tests/__init__.py new file mode 100644 index 0000000..0501c3a --- /dev/null +++ b/webinars/panorama-ssot/nautobot_ssot_panorama/tests/__init__.py @@ -0,0 +1 @@ +"""Unit tests for nautobot_ssot_panorama plugin.""" diff --git a/webinars/panorama-ssot/nautobot_ssot_panorama/tests/fixtures/get_sites.json b/webinars/panorama-ssot/nautobot_ssot_panorama/tests/fixtures/get_sites.json new file mode 100644 index 0000000..e69de29 diff --git a/webinars/panorama-ssot/nautobot_ssot_panorama/tests/test_api.py b/webinars/panorama-ssot/nautobot_ssot_panorama/tests/test_api.py new file mode 100644 index 0000000..69ceaf2 --- /dev/null +++ b/webinars/panorama-ssot/nautobot_ssot_panorama/tests/test_api.py @@ -0,0 +1,28 @@ +"""Unit tests for nautobot_ssot_panorama.""" +from django.contrib.auth import get_user_model +from django.test import TestCase +from django.urls import reverse +from rest_framework 
import status +from rest_framework.test import APIClient + +from nautobot.users.models import Token + +User = get_user_model() + + +class PlaceholderAPITest(TestCase): + """Test the NautobotSSoTPanorama API.""" + + def setUp(self): + """Create a superuser and token for API calls.""" + self.user = User.objects.create(username="testuser", is_superuser=True) + self.token = Token.objects.create(user=self.user) + self.client = APIClient() + self.client.credentials(HTTP_AUTHORIZATION=f"Token {self.token.key}") + + def test_placeholder(self): + """Verify that devices can be listed.""" + url = reverse("dcim-api:device-list") + response = self.client.get(url) + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.data["count"], 0) diff --git a/webinars/panorama-ssot/nautobot_ssot_panorama/tests/test_basic.py b/webinars/panorama-ssot/nautobot_ssot_panorama/tests/test_basic.py new file mode 100644 index 0000000..e845561 --- /dev/null +++ b/webinars/panorama-ssot/nautobot_ssot_panorama/tests/test_basic.py @@ -0,0 +1,16 @@ +"""Basic tests that do not require Django.""" +import unittest +import os +import toml + +from nautobot_ssot_panorama import __version__ as project_version + + +class TestVersion(unittest.TestCase): + """Test Version is the same.""" + + def test_version(self): + """Verify that pyproject.toml version is same as version specified in the package.""" + parent_path = os.path.dirname(os.path.dirname(os.path.dirname(os.path.realpath(__file__)))) + poetry_version = toml.load(os.path.join(parent_path, "pyproject.toml"))["tool"]["poetry"]["version"] + self.assertEqual(project_version, poetry_version) diff --git a/webinars/panorama-ssot/nautobot_ssot_panorama/tests/test_panorama_adapter.py b/webinars/panorama-ssot/nautobot_ssot_panorama/tests/test_panorama_adapter.py new file mode 100644 index 0000000..f84c5ea --- /dev/null +++ b/webinars/panorama-ssot/nautobot_ssot_panorama/tests/test_panorama_adapter.py @@ -0,0 +1,45 @@ +"""Test 
Panorama adapter.""" + +import json +import uuid +from unittest.mock import MagicMock + +from django.contrib.contenttypes.models import ContentType +from nautobot.extras.models import Job, JobResult +from nautobot.utilities.testing import TransactionTestCase +from nautobot_ssot_panorama.diffsync.adapters.panorama import PanoramaAdapter +from nautobot_ssot_panorama.jobs import PanoramaDataSource + + +def load_json(path): + """Load a json file.""" + with open(path, encoding="utf-8") as file: + return json.loads(file.read()) + + +SITE_FIXTURE = [] + + +class TestPanoramaAdapterTestCase(TransactionTestCase): + """Test NautobotSSoTPanoramaAdapter class.""" + + databases = ("default", "job_logs") + + def setUp(self): + """Initialize test case.""" + self.panorama_client = MagicMock() + self.panorama_client.get_sites.return_value = SITE_FIXTURE + + self.job = PanoramaDataSource() + self.job.job_result = JobResult.objects.create( + name=self.job.class_path, obj_type=ContentType.objects.get_for_model(Job), user=None, job_id=uuid.uuid4() + ) + self.panorama = PanoramaAdapter(job=self.job, sync=None, client=self.panorama_client) + + def test_data_loading(self): + """Test Nautobot SSoT Panorama load() function.""" + # self.panorama.load() + # self.assertEqual( + # {site["name"] for site in SITE_FIXTURE}, + # {site.get_unique_id() for site in self.panorama.get_all("site")}, + # ) diff --git a/webinars/panorama-ssot/nautobot_ssot_panorama/urls.py b/webinars/panorama-ssot/nautobot_ssot_panorama/urls.py new file mode 100644 index 0000000..062ad86 --- /dev/null +++ b/webinars/panorama-ssot/nautobot_ssot_panorama/urls.py @@ -0,0 +1,67 @@ +"""Plugin URLS.""" +from django.urls import path +from nautobot.core.views.routers import NautobotUIViewSetRouter +from nautobot.extras.views import ObjectChangeLogView, ObjectNotesView + +from nautobot_ssot_panorama.views import ( + VirtualSystemUIViewSet, + LogicalGroupUIViewSet, + ControlPlaneSystemUIViewSet, + DeviceVirtualSystemTabView, + 
DeviceLogicalGroupTabView, +) +from nautobot_ssot_panorama.models import VirtualSystem, LogicalGroup, ControlPlaneSystem + +router = NautobotUIViewSetRouter() +router.register("virtual-system", VirtualSystemUIViewSet) +router.register("control-plane-system", ControlPlaneSystemUIViewSet) +router.register("logical-group", LogicalGroupUIViewSet) +urlpatterns = [ + path( + "control-plane-system//changelog/", + ObjectChangeLogView.as_view(), + name="controlplanesystem_changelog", + kwargs={"model": ControlPlaneSystem}, + ), + path( + "control-plane-system//notes/", + ObjectNotesView.as_view(), + name="controlplanesystem_notes", + kwargs={"model": ControlPlaneSystem}, + ), + path( + "virtual-system//changelog/", + ObjectChangeLogView.as_view(), + name="virtualsystem_changelog", + kwargs={"model": VirtualSystem}, + ), + path( + "virtual-system//notes/", + ObjectNotesView.as_view(), + name="virtualsystem_notes", + kwargs={"model": VirtualSystem}, + ), + path( + "virtual-system//device/", + DeviceVirtualSystemTabView.as_view(), + name="virtualsystem_device_tab", + ), + path( + "logical-group//changelog/", + ObjectChangeLogView.as_view(), + name="logicalgroup_changelog", + kwargs={"model": LogicalGroup}, + ), + path( + "logical-group//notes/", + ObjectNotesView.as_view(), + name="logicalgroup_notes", + kwargs={"model": LogicalGroup}, + ), + path( + "logical-group//device/", + DeviceLogicalGroupTabView.as_view(), + name="logicalgroup_device_tab", + ), +] +urlpatterns += router.urls diff --git a/webinars/panorama-ssot/nautobot_ssot_panorama/utils/__init__.py b/webinars/panorama-ssot/nautobot_ssot_panorama/utils/__init__.py new file mode 100644 index 0000000..ebe887c --- /dev/null +++ b/webinars/panorama-ssot/nautobot_ssot_panorama/utils/__init__.py @@ -0,0 +1 @@ +"""Utility functions for working with Panorama and Nautobot.""" diff --git a/webinars/panorama-ssot/nautobot_ssot_panorama/utils/nautobot.py b/webinars/panorama-ssot/nautobot_ssot_panorama/utils/nautobot.py new file 
mode 100644 index 0000000..dbbbd21 --- /dev/null +++ b/webinars/panorama-ssot/nautobot_ssot_panorama/utils/nautobot.py @@ -0,0 +1,387 @@ +"""Utility functions for working with Nautobot.""" +from ipaddress import ip_address, ip_network +import re + +from nautobot.dcim.models import Device, DeviceType, DeviceRole, Site, Interface +from nautobot.extras.models import Status +from nautobot.ipam.models import IPAddress, Prefix +from nautobot_firewall_models.models import ( + IPRange, + FQDN, + AddressObject, + AddressObjectGroup, + ApplicationObject, + ApplicationObjectGroup, + ServiceObject, + ServiceObjectGroup, + Zone, + UserObjectGroup, + PolicyRule, + Policy, +) + +from nautobot_ssot_panorama.models import ControlPlaneSystem, LogicalGroup, VirtualSystem + + +class Nautobot: # pylint: disable=too-many-public-methods + """Helper methods for interacting with Django ORM.""" + + def create_vsys(self, ids, attrs): # pylint: disable=no-self-use + """Creates Vsys.""" + device = Device.objects.get(serial=ids["parent"]) + ifaces = device.interfaces.filter(name__in=attrs["interfaces"]) + sysid = int(re.sub("[^0-9]", "", ids["name"])) + vsys, _ = VirtualSystem.objects.get_or_create(name=ids["name"], device=device, system_id=sysid) + vsys.interfaces.set(list(ifaces)) + return vsys + + def update_vsys(self, name, parent, attrs): # pylint: disable=no-self-use + """Upates Vsys.""" + vsys = VirtualSystem.objects.get(name=name, device=parent) + if "interfaces" in attrs: + ifaces = Interface.objects.filter(device__id=parent, name__in=attrs["interfaces"]) + vsys.interfaces.clear() + vsys.interfaces.set(list(ifaces)) + return vsys + + def create_firewall(self, ids, attrs): # pylint: disable=no-self-use + """Creates a Firewall.""" + if Device.objects.filter(serial=ids["serial"]).exists(): + return Device.objects.get(serial=ids["serial"]) + + device = Device.objects.create( + status=Status.objects.get(name="Staging"), + serial=ids["serial"], + name=attrs["name"], + 
device_role=DeviceRole.objects.get(name="Panorama Staging"), + device_type=DeviceType.objects.get(model="Panorama Staging"), + site=Site.objects.get(name="Panorama Staging"), + ) + for i in attrs.get("interfaces", []): + Interface.objects.create(device=device, name=i) + return device + + def update_firewall(self, serial, attrs): # pylint: disable=no-self-use + """Updates a Firewall.""" + device = Device.objects.get(name=serial) + if "name" in attrs: + device.name = attrs["name"] + device.validated_save() + if "interfaces" in attrs: + Interface.objects.filter(device=device).exclude(name__in=attrs["interfaces"]).delete() + for i in attrs["interfaces"]: + Interface.objects.get_or_create(name=i, device=device) + return device + + def create_address_object(self, ids, attrs): # pylint: disable=no-self-use + """Creates an AddressObject and any child objects.""" + if AddressObject.objects.filter(name=ids["name"]).exists(): + return AddressObject.objects.get(name=ids["name"]) + + status = Status.objects.get(name="Active") + addr_type = attrs["type"] + + if addr_type == "ip-wildcard": + raise ValueError("IP Wildcard is not supported.") + + if addr_type == "fqdn": + addr, _ = FQDN.objects.get_or_create(name=attrs["address"], status=status) + return AddressObject.objects.create(name=ids["name"], fqdn=addr, status=status) + + if addr_type == "ip-range": + addr_range = attrs["address"].split("-") + addr, _ = IPRange.objects.get_or_create( + start_address=addr_range[0], end_address=addr_range[1], status=status + ) + return AddressObject.objects.create(name=ids["name"], ip_range=addr, status=status) + + try: + ip_address(attrs["address"]) + addr, _ = IPAddress.objects.get_or_create(address=attrs["address"]) + return AddressObject.objects.create(name=ids["name"], ip_address=addr, status=status) + except ValueError: + pass + try: + ip_net = ip_network(attrs["address"]) + addr, _ = Prefix.objects.get_or_create(network=str(ip_net.network_address), prefix_length=ip_net.prefixlen) + 
return AddressObject.objects.create(name=ids["name"], prefix=addr, status=status) + except ValueError: + ip_address(attrs["address"].split("/")[0]) + addr, _ = IPAddress.objects.get_or_create(address=attrs["address"]) + return AddressObject.objects.create(name=ids["name"], ip_address=addr, status=status) + + def update_address_object(self, name, type, attrs): # pylint: disable=no-self-use,redefined-builtin + """Updates an AddressObject and any child objects.""" + addr_type = type + + if addr_type == "ip-wildcard": + raise ValueError("IP Wildcard is not supported.") + + obj = AddressObject.objects.get(name=name) + obj.fqdn = None + obj.ip_range = None + obj.ip_address = None + obj.prefix = None + + if addr_type == "fqdn": + addr = FQDN.objects.get(name=attrs["address"]) + obj.fqdn = addr + + elif addr_type == "ip-range": + addr_range = attrs["address"].split("-") + addr = IPRange.objects.get(start_address=addr_range[0], end_address=addr_range[1]) + obj.ip_range = addr + + else: + try: + ip_address(attrs["address"]) + addr = IPAddress.objects.get(address=attrs["address"]) + obj.ip_address = addr + except ValueError: + pass + try: + ip_net = ip_network(attrs["address"]) + addr = Prefix.objects.get(network=str(ip_net.network_address), prefix_length=ip_net.prefixlen) + obj.prefix = addr + except ValueError: + ip_address(attrs["address"].split("/")[0]) + addr = IPAddress.objects.get(address=attrs["address"]) + obj.ip_address = addr + + obj.validated_save() + return obj + + def create_address_group(self, ids, attrs): + """Creates an AddressObjectGroup and any child objects.""" + group, _ = AddressObjectGroup.objects.get_or_create(name=ids["name"]) + + group.custom_field_data.update( + {"group-type": attrs.get("type"), "dynamic-address-group-filter": attrs.get("filter")} + ) + self._set_many_to_many(group, AddressObject, "address_objects", attrs, "addressobjects") + group.validated_save() + return group + + def update_address_group(self, name, attrs): + """Updates an 
AddressObjectGroup and any child objects.""" + group = AddressObjectGroup.objects.get(name=name) + self._set_many_to_many(group, AddressObject, "address_objects", attrs, "addressobjects") + if "type" in attrs: + group.custom_field_data.update({"group-type": attrs.get("type")}) + if "filter" in attrs: + group.custom_field_data.update({"dynamic-address-group-filter": attrs.get("filter")}) + group.validated_save() + return group + + def create_application_object(self, ids, attrs): + """Creates an Application and any child objects.""" + if ApplicationObject.objects.filter(name=ids["name"]).exists(): + return ApplicationObject.objects.get(name=ids["name"]) + app_members = attrs.pop("members") + obj = ApplicationObject.objects.create(name=ids["name"]) + obj = self._set_application_attrs(obj, attrs) + obj.validated_save() + return obj, app_members + + def update_application_object(self, name, attrs): + """Updates an Application and any child objects.""" + app_members = attrs.pop("members") + obj = self._set_application_attrs(ApplicationObject.objects.get(name=name), attrs) + obj.validated_save() + return obj, app_members + + def _set_application_attrs(self, obj, attrs): # pylint: disable=no-self-use + """Helper method to reduce repeated code.""" + for attr, value in attrs.items(): + if attr == "type": + obj.custom_field_data.update({"application-type": value}) + else: + setattr(obj, attr, value) + return obj + + def create_application_group(self, ids, attrs): + """Creates an ApplicationGroup and any child objects.""" + group, _ = ApplicationObjectGroup.objects.get_or_create(name=ids["name"]) + self._set_many_to_many(group, ApplicationObject, "application_objects", attrs, "applications") + return group + + def update_application_group(self, name, attrs): + """Updates an ApplicationGroup and any child objects.""" + group = ApplicationObjectGroup.objects.get(name=name) + self._set_many_to_many(group, ApplicationObject, "application_objects", attrs, "applications") + return 
group + + def create_device_group(self, ids, attrs): # pylint: disable=no-self-use + """Creates an DeviceGroup and any child objects.""" + group, _ = LogicalGroup.objects.get_or_create(name=ids["name"]) + if attrs.get("parent"): + group.parent, _ = LogicalGroup.objects.get_or_create(name=attrs["parent"]) + group.control_plane = ControlPlaneSystem.objects.get(id=attrs["panorama"]) + + if "firewalls" in attrs: + group.devices.clear() + if isinstance(attrs["firewalls"], list): + for i in attrs["firewalls"]: + group.devices.add(Device.objects.get(serial=i)) + + if "vsys" in attrs: + group.virtual_systems.clear() + if isinstance(attrs["vsys"], list): + for i in attrs["vsys"]: + group.virtual_systems.add(VirtualSystem.objects.get(name=i["name"], device__serial=i["parent"])) + + if attrs.get("pre_policy"): + group.pre_policy = Policy.objects.get(name=attrs["pre_policy"]) + if attrs.get("post_policy"): + group.post_policy = Policy.objects.get(name=attrs["post_policy"]) + group.validated_save() + return group + + def update_device_group(self, name, attrs): # pylint: disable=no-self-use + """Updates an DeviceGroup and any child objects.""" + group = LogicalGroup.objects.get(name=name) + if "parent" in attrs and attrs["parent"]: + group.parent = LogicalGroup.objects.get(name=attrs["parent"]) + elif "parent" in attrs: + group.parent = None + if not group.control_plane and attrs.get("panorama"): + group.control_plane = ControlPlaneSystem.objects.get(id=attrs["panorama"]) + if "firewalls" in attrs: + group.devices.clear() + if isinstance(attrs["firewalls"], list): + for i in attrs["firewalls"]: + group.devices.add(Device.objects.get(serial=i)) + if "vsys" in attrs: + group.virtual_systems.clear() + if isinstance(attrs["vsys"], list): + for i in attrs["vsys"]: + group.virtual_systems.add(VirtualSystem.objects.get(name=i["name"], device__serial=i["parent"])) + if attrs.get("pre_policy"): + group.pre_policy = Policy.objects.get(name=attrs["pre_policy"]) + if 
attrs.get("post_policy"): + group.post_policy = Policy.objects.get(name=attrs["post_policy"]) + group.validated_save() + return group + + def create_service_group(self, ids, attrs): + """Creates an ServiceObjectGroup and any child objects.""" + group, _ = ServiceObjectGroup.objects.get_or_create(name=ids["name"]) + self._set_many_to_many(group, ServiceObject, "service_objects", attrs, "serviceobjects") + return group + + def update_service_group(self, name, attrs): + """Updates an ServiceObjectGroup and any child objects.""" + group = ServiceObjectGroup.objects.get(name=name) + self._set_many_to_many(group, ServiceObject, "service_objects", attrs, "serviceobjects") + return group + + def create_service_object(self, ids, attrs): # pylint: disable=no-self-use + """Creates a ServiceObject and any child objects.""" + if ServiceObject.objects.filter(name=ids["name"]).exists(): + return ServiceObject.objects.get(name=ids["name"]) + + status = Status.objects.get(name="Active") + port = attrs["port"] + protocol = attrs["protocol"] + return ServiceObject.objects.create(name=ids["name"], port=port, ip_protocol=protocol, status=status) + + def update_service_object(self, name, attrs): # pylint: disable=no-self-use + """Updates an AddressObject and any child objects.""" + obj = ServiceObject.objects.get(name=name) + + if attrs.get("port"): + obj.port = attrs["port"] + if attrs.get("protocol"): + obj.ip_protocol = attrs["protocol"] + + obj.validated_save() + return obj + + def create_user_object_group(self, ids): # pylint: disable=no-self-use + """Creates a UserObjectGroup and any child objects.""" + if UserObjectGroup.objects.filter(name=ids["name"]).exists(): + return UserObjectGroup.objects.get(name=ids["name"]) + status = Status.objects.get(name="Active") + return UserObjectGroup.objects.create(name=ids["name"], status=status) + + def create_zone(self, ids, firewalls): # pylint: disable=no-self-use + """Creates Zone.""" + if Zone.objects.filter(name=ids["name"]).exists(): + 
return Zone.objects.get(name=ids["name"]) + status = Status.objects.get(name="Active") + ifaces = [] + for firewall, iface_list in firewalls.items(): + ifaces += list(Device.objects.get(serial=firewall).interfaces.filter(name__in=iface_list)) + zone = Zone.objects.create(name=ids["name"], status=status) + zone.interfaces.set(ifaces) + return zone + + def create_policy_rule(self, ids, attrs): + """Creates PolicyRule.""" + if PolicyRule.objects.filter(name=ids["name"]).exists(): + return PolicyRule.objects.get(name=ids["name"]) + status = Status.objects.get(name="Active") + rule = PolicyRule.objects.create( + name=ids["name"], log=attrs["log"], action=attrs["action"], index=attrs["index"], status=status + ) + return self._set_policy_rules_data(rule, attrs) + + def _set_policy_rules_data(self, rule, attrs): + """Wrapper to deduplication code.""" + self._set_many_to_many(rule, AddressObject, "source_addresses", attrs, "sourceaddressobjects") + self._set_many_to_many(rule, AddressObjectGroup, "source_address_groups", attrs, "sourceaddressgroups") + self._set_many_to_many(rule, AddressObject, "destination_addresses", attrs, "destaddressobjects") + self._set_many_to_many(rule, AddressObjectGroup, "destination_address_groups", attrs, "destaddressgroups") + + self._set_many_to_many(rule, ServiceObject, "source_services", attrs, "sourceserviceobjects") + self._set_many_to_many(rule, ServiceObjectGroup, "source_service_groups", attrs, "sourceservicegroups") + self._set_many_to_many(rule, ServiceObject, "destination_services", attrs, "destserviceobjects") + self._set_many_to_many(rule, ServiceObjectGroup, "destination_service_groups", attrs, "destservicegroups") + + self._set_many_to_many(rule, ApplicationObject, "applications", attrs, "applications") + self._set_many_to_many(rule, ApplicationObjectGroup, "application_groups", attrs, "applicationgroups") + + self._set_many_to_many(rule, UserObjectGroup, "source_user_groups", attrs, "usergroups") + + if attrs.get("sourcezone"): 
+ rule.source_zone = Zone.objects.get(name=attrs["sourcezone"]) + elif "sourcezone" in attrs: + rule.source_zone = None + + if attrs.get("destzone"): + rule.destination_zone = Zone.objects.get(name=attrs["destzone"]) + elif "destzone" in attrs: + rule.destination_zone = None + + rule.validated_save() + return rule + + def _set_many_to_many( + self, parent_obj, child_obj, parent_attr, attrs, attrs_key + ): # pylint: disable=no-self-use,too-many-arguments + """Helper for setting ManyToManyFields.""" + if attrs_key not in attrs: + return parent_obj + if not attrs[attrs_key]: + getattr(parent_obj, parent_attr).clear() + else: + child_attr = getattr(parent_obj, parent_attr) + child_attr.set(list(child_obj.objects.filter(name__in=attrs[attrs_key]))) + return parent_obj + + def update_policy_rule(self, name, attrs): # pylint: disable=no-self-use + """Updates PolicyRule.""" + rule = PolicyRule.objects.get(name=name) + return self._set_policy_rules_data(rule, attrs) + + def create_policy(self, ids, attrs): + """Creates Policy.""" + policy, _ = Policy.objects.get_or_create(name=ids["name"]) + self._set_many_to_many(policy, PolicyRule, "policy_rules", attrs, "policyrule_names") + return policy + + def update_policy(self, name, attrs): + """Updates Policy.""" + obj = Policy.objects.get(name=name) + self._set_many_to_many(obj, PolicyRule, "policy_rules", attrs, "policyrule_names") + return obj diff --git a/webinars/panorama-ssot/nautobot_ssot_panorama/utils/panorama/__init__.py b/webinars/panorama-ssot/nautobot_ssot_panorama/utils/panorama/__init__.py new file mode 100644 index 0000000..00fa3c1 --- /dev/null +++ b/webinars/panorama-ssot/nautobot_ssot_panorama/utils/panorama/__init__.py @@ -0,0 +1,28 @@ +"""Panorama SDK.""" +from panos.panorama import Panorama as PanOsPanorama + +from .address import PanoramaAddress +from .application import PanoramaApplication +from .device_group import PanoramaDeviceGroup +from .firewall import PanoramaFirewall +from .policy import 
PanoramaPolicy +from .service import PanoramaService +from .user import PanoramaUser + + +class Panorama: # pylint: disable=too-many-instance-attributes,too-few-public-methods + """Wrapper on Panorama python SDK.""" + + def __init__( + self, url=None, username=None, password=None, verify=True, port=443 + ): # pylint: disable=too-many-arguments + """Create base connectivity to Panorama.""" + self.pano = PanOsPanorama(url, api_username=username, api_password=password, port=port, verify=verify) + self.device_group = PanoramaDeviceGroup(self.pano, {}) + device_groups = self.device_group.retrieve_device_groups() + self.address = PanoramaAddress(self.pano, device_groups) + self.application = PanoramaApplication(self.pano, device_groups) + self.firewall = PanoramaFirewall(self.pano, device_groups) + self.policy = PanoramaPolicy(self.pano, device_groups) + self.service = PanoramaService(self.pano, device_groups) + self.user = PanoramaUser(self.pano, device_groups) diff --git a/webinars/panorama-ssot/nautobot_ssot_panorama/utils/panorama/address.py b/webinars/panorama-ssot/nautobot_ssot_panorama/utils/panorama/address.py new file mode 100644 index 0000000..b640378 --- /dev/null +++ b/webinars/panorama-ssot/nautobot_ssot_panorama/utils/panorama/address.py @@ -0,0 +1,100 @@ +"""AddressObject API.""" +from panos.objects import AddressGroup, AddressObject + +from .base import BaseAPI + + +class PanoramaAddress(BaseAPI): + """Address Objects API SDK.""" + + addresses = {} + + def _delete_instance(self, name): + """Deletes an instance of an AddressObject or AddressGroup.""" + obj = self.addresses.pop(name) + obj.delete() + + def get(self, name): + """Returns a prefetched instance.""" + return self.addresses[name]["value"] + + ##################### + # AddressGroup + ##################### + + def create_address_group( + self, name, grp_type, location=None, addrs=None, filter=None + ): # pylint: disable=redefined-builtin,too-many-arguments + """Creates AddressGroup.""" + location = 
self._get_location(location) + if grp_type == "static": + group = AddressGroup(name, static_value=addrs) + else: + group = AddressGroup(name, dynamic_value=filter) + location.add(group) + group.create() + self.addresses[group.name] = { + "value": group, + "type": "group", + "location": "shared" if location == self.pano else location.name, + } + return group + + def retrieve_address_groups(self): + """Returns all AddressGroups.""" + self.addresses.update(self._get_all_via_device_groups(AddressGroup, "group")) + return self.addresses + + def update_address_group(self, name, grp_type, addrs=None, filter=None): # pylint: disable=redefined-builtin + """Updates a single AddressGroup.""" + group = self.get(name) + if grp_type == "static": + group.static_value = addrs + group.dynamic_value = None + else: + group.dynamic_value = filter + group.static_value = None + group.apply() + self.addresses[name]["value"] = group + return group + + def delete_address_group(self, name): + """Deletes a single AddressGroup.""" + self._delete_instance(name) + + ##################### + # AddressObject + ##################### + + def create_address_object(self, name, address, addr_type, location=None): + """Creates AddressObject.""" + location = self._get_location(location) + addr = AddressObject(name, value=address, type=addr_type) + location.add(addr) + addr.create() + self.addresses[addr.name] = { + "value": addr, + "type": "object", + "location": "shared" if location == self.pano else location.name, + } + return addr + + def retrieve_address_objects(self): + """Returns all AddressObjects.""" + self.addresses.update(self._get_all_via_device_groups(AddressObject, "object")) + return self.addresses + + def update_address_object(self, name, address=None, addr_type=None): + """Updates AddressObject.""" + addr = self.get(name) + if address: + addr.value = address + if addr_type: + addr.type = addr_type + addr.apply() + self.addresses[name]["value"] = addr + return addr + + def 
delete_address_object(self, name): + """Deletes a single AddressObject.""" + self._delete_instance(name) diff --git a/webinars/panorama-ssot/nautobot_ssot_panorama/utils/panorama/application.py b/webinars/panorama-ssot/nautobot_ssot_panorama/utils/panorama/application.py new file mode 100644 index 0000000..8c55a1d --- /dev/null +++ b/webinars/panorama-ssot/nautobot_ssot_panorama/utils/panorama/application.py @@ -0,0 +1,131 @@ +"""ApplicationObject API.""" +from panos.objects import ApplicationGroup, ApplicationObject +from panos.predefined import Predefined + +from .base import BaseAPI + + +class PanoramaApplication(BaseAPI): + """Application Objects API SDK.""" + + applications = {} + + def _delete_instance(self, name): + """Deletes an instance of an ApplicationGroup or ApplicationObject.""" + obj = self.applications.pop(name) + obj.delete() + + def get(self, name): + """Returns a prefetched instance.""" + return self.applications[name]["value"] + + ##################### + # ApplicationGroup + ##################### + + def create_application_group(self, location=None, name=None, applications=None): + """Create ApplicationGroup.""" + location = self._get_location(location) + group = ApplicationGroup(name, value=applications) + location.add(group) + group.create() + self.applications[group.name] = { + "value": group, + "type": "group", + "location": "shared" if location == self.pano else location.name, + } + return group + + def retrieve_application_groups(self): + """Returns all ApplicationGroups.""" + self.applications.update(self._get_all_via_device_groups(ApplicationGroup, "group")) + return self.applications + + def update_application_group(self, name, applications): + """Updates a single instance of an applicationgroup.""" + group = self.applications[name]["value"] + group.value = applications + group.apply() + self.applications[name]["value"] = group + return group + + def delete_application_group(self, name): + """Deletes a single ApplicationGroup.""" + 
self._delete_instance(name) + + ##################### + # ApplicationObject + ##################### + + def create_application_object( + self, + name, + category, + subcategory, + technology, + risk, + default_ports, + default_ip_protocol, + description, + default_type="port", + location=None, + ): # pylint: disable=too-many-arguments + """Creates ApplicationObject.""" + location = self._get_location(location) + app = ApplicationObject( + name, + category=category, + subcategory=subcategory, + technology=technology, + risk=risk, + default_ports=default_ports, + default_ip_protocol=default_ip_protocol, + description=description, + default_port=default_ports, + default_type=default_type, + ) + location.add(app) + app.create() + self.applications[app.name] = { + "value": app, + "type": "object", + "location": "shared" if location == self.pano else location.name, + } + return app + + def retrieve_application_objects(self): + """Returns all ApplicationObjects.""" + predefined = Predefined(self.pano) + predefined.refreshall_applications() + self.applications.update( + { + name: {"value": app, "type": "object", "location": "predefined"} + for name, app in predefined.application_objects.items() + } + ) + self.applications.update( + { + name: {"value": app, "type": "container", "location": "predefined"} + for name, app in predefined.application_container_objects.items() + } + ) + + self.applications.update(self._get_all_via_device_groups(ApplicationObject, "object")) + return self.applications + + def update_application_object(self, name, **kwargs): + """Updates a single ApplicationObject.""" + if self.applications[name]["location"] == "predefined": + raise ValueError("Unable to update predefined application") + app = self.applications[name]["value"] + for attr, value in kwargs.items(): + if hasattr(app, attr): + setattr(app, attr, value) + else: + raise ValueError(f"Unsupported attribute {attr}") + + def delete_application_object_object(self, name): + """Deletes a single 
ApplicationObject.""" + if self.applications[name]["location"] == "predefined": + raise ValueError("Unable to delete predefined application") + self._delete_instance(name) diff --git a/webinars/panorama-ssot/nautobot_ssot_panorama/utils/panorama/base.py b/webinars/panorama-ssot/nautobot_ssot_panorama/utils/panorama/base.py new file mode 100644 index 0000000..8c5184b --- /dev/null +++ b/webinars/panorama-ssot/nautobot_ssot_panorama/utils/panorama/base.py @@ -0,0 +1,30 @@ +"""Base API SDK Class.""" +from panos.panorama import DeviceGroup + + +class BaseAPI: # pylint: disable=too-few-public-methods + """Create a base API for reuse.""" + + def __init__(self, panorama, device_groups, job=None): + """Init object with panorama instnace, job, & device group.""" + self.pano = panorama + self.device_groups = device_groups + self.job = job + + def _get_all_via_device_groups(self, obj_class, obj_type): + output = {} + for group in self.device_groups.values(): + for obj in obj_class.refreshall(group): + output[obj.name] = {"value": obj, "type": obj_type, "location": group.name} + for obj in obj_class.refreshall(self.pano): + output[obj.name] = {"value": obj, "type": obj_type, "location": "shared"} + return output + + def _get_location(self, location): + if not location: + return self.pano + if isinstance(location, str): + return self.device_groups[location] + if isinstance(location, DeviceGroup): + return location + raise ValueError("Invalid location.") diff --git a/webinars/panorama-ssot/nautobot_ssot_panorama/utils/panorama/device_group.py b/webinars/panorama-ssot/nautobot_ssot_panorama/utils/panorama/device_group.py new file mode 100644 index 0000000..dd6eac8 --- /dev/null +++ b/webinars/panorama-ssot/nautobot_ssot_panorama/utils/panorama/device_group.py @@ -0,0 +1,50 @@ +"""DeviceGroup API.""" +from panos.panorama import DeviceGroup, DeviceGroupHierarchy, PanoramaDeviceGroupHierarchy + +from .base import BaseAPI + + +class PanoramaDeviceGroup(BaseAPI): + """DeviceGroup 
Objects API SDK.""" + + def get(self, name): + """Returns a prefetched instance.""" + return self.device_groups[name]["value"] + + def get_parent(self, name): + """Returns parent DeviceGroup name.""" + return PanoramaDeviceGroupHierarchy(self.pano).fetch().get(name) + + def create_device_group(self, name, parent=None): + """Creates a DeviceGroup.""" + dev_group = DeviceGroup(name) + self.pano.add(dev_group) + dev_group.create() + self.device_groups[name] = dev_group + if parent: + if not self.device_groups.get(parent): + parent_dg = DeviceGroup(parent) + self.pano.add(parent_dg) + parent_dg.create() + self.device_groups[parent] = parent_dg + dgh = DeviceGroupHierarchy(dev_group) + dgh.parent = parent + dgh.update() + return dev_group + + def retrieve_device_groups(self): + """Returns all DeviceGroups.""" + self.device_groups = {i.name: i for i in self.pano.refresh_devices() if isinstance(i, DeviceGroup)} + return self.device_groups + + def update_device_group(self, name, parent): + """Updates a DeviceGroup.""" + dgh = DeviceGroupHierarchy(self.get(name)) + dgh.parent = parent + dgh.update() + return self.get(name) + + def delete_device_group(self, name): + """Deletes a DeviceGroup.""" + dev_group = self.get(name) + dev_group.delete() diff --git a/webinars/panorama-ssot/nautobot_ssot_panorama/utils/panorama/firewall.py b/webinars/panorama-ssot/nautobot_ssot_panorama/utils/panorama/firewall.py new file mode 100644 index 0000000..4542e1d --- /dev/null +++ b/webinars/panorama-ssot/nautobot_ssot_panorama/utils/panorama/firewall.py @@ -0,0 +1,162 @@ +"""Zone API.""" +from panos.network import Zone +from panos.device import Vsys, SystemSettings +from panos.errors import PanDeviceXapiError +from panos.firewall import Firewall + +from .base import BaseAPI + + +class PanoramaFirewall(BaseAPI): + """Firewall Zone Vsys Objects API SDK.""" + + zones = {} + firewalls = {} + vsys = {} + + ##################### + # Firewall + ##################### + + def get_firewall(self, 
"""Zone API."""
from panos.network import Zone
from panos.device import Vsys, SystemSettings
from panos.errors import PanDeviceXapiError
from panos.firewall import Firewall

from .base import BaseAPI


class PanoramaFirewall(BaseAPI):
    """Firewall Zone Vsys Objects API SDK."""

    def __init__(self, panorama, device_groups, job=None):
        """Init with per-instance caches.

        The original declared ``zones``/``firewalls``/``vsys`` as class-level
        dicts, silently shared by every instance; fresh dicts per instance
        avoid cross-instance pollution.
        """
        super().__init__(panorama, device_groups, job=job)
        self.zones = {}
        self.firewalls = {}
        self.vsys = {}

    #####################
    # Firewall
    #####################

    def get_firewall(self, serial):
        """Return a prefetched Firewall by serial.

        Entries are stored as raw Firewall objects (see retrieve_firewalls/
        create_firewall), so the original ``["value"]`` indexing always failed.
        """
        return self.firewalls[serial]

    def get_hostname(self, firewall: "Firewall") -> str:  # pylint: disable=no-self-use
        """Returns a firewall's hostname if reachable else serial.

        Args:
            firewall (Firewall): panos.firewall.Firewall instance

        Returns:
            str: Hostname or serial
        """
        try:
            return SystemSettings.refreshall(firewall)[0].hostname
        except PanDeviceXapiError:
            return firewall.serial

    def retrieve_firewalls(self):
        """Return all Firewalls found under the known device groups, keyed by serial."""
        for d_g in self.device_groups.values():
            for firewall in d_g.children:
                if not isinstance(firewall, Firewall):
                    continue
                self.firewalls[firewall.serial] = firewall
        return self.firewalls

    def create_firewall(self, serial, group):
        """Create a Firewall in the named device group and cache it."""
        firewall = Firewall(serial=serial)
        self.device_groups[group].add(firewall)
        self.pano.add(firewall)
        firewall.create()
        self.firewalls[firewall.serial] = firewall
        return firewall

    def update_firewall(self):
        """Update Firewall."""
        raise NotImplementedError("Not implemented.")

    def delete_firewall(self, serial):
        """Delete a Firewall and drop any cached zones/vsys for it."""
        obj = self.firewalls.pop(serial)
        obj.delete()
        # Caches may have no entry for this serial; don't raise during cleanup
        # (the original unconditional pops could KeyError here).
        self.zones.pop(serial, None)
        self.vsys.pop(serial, None)

    #####################
    # Vsys
    #####################

    def get_vsys(self, firewall, vsys):
        """Return a prefetched Vsys by firewall serial and vsys name."""
        return self.vsys[firewall][vsys]

    def retrieve_vsys(self):
        """Return all Vsys keyed by firewall serial then vsys name."""
        for d_g in self.device_groups.values():
            for firewall in d_g.children:
                # Device-group children are Firewalls; the original tested
                # ``isinstance(firewall, Vsys)`` which never matched, so
                # nothing was ever retrieved.
                if not isinstance(firewall, Firewall):
                    continue
                self.vsys[firewall.serial] = {}
                for vsys in Vsys.refreshall(firewall):
                    self.vsys[firewall.serial][vsys.name] = vsys
        return self.vsys

    def create_vsys(self, serial, name):
        """Create a Vsys on the firewall with the given serial."""
        firewall = self.get_firewall(serial)
        vsys = Vsys(name)
        firewall.add(vsys)
        vsys.create()
        # setdefault: the serial may not have been seeded by retrieve_vsys yet.
        self.vsys.setdefault(serial, {})[name] = vsys
        return vsys

    def update_vsys(self):
        """Update Vsys (not implemented)."""
        raise NotImplementedError("Not implemented.")

    def delete_vsys(self, serial, vsys):
        """Delete a Vsys by firewall serial and name."""
        self.vsys[serial][vsys].delete()

    #####################
    # Zone
    #####################

    def get_zone(self, firewall, name):
        """Return a prefetched Zone by firewall serial and zone name.

        Entries are raw Zone objects (see retrieve_zones), so no ``["value"]``
        indirection — the original indexing always failed.
        """
        return self.zones[firewall][name]

    def retrieve_zones(self):
        """Return all Zones that have interfaces, keyed by firewall serial."""
        for d_g in self.device_groups.values():
            for firewall in d_g.children:
                if not isinstance(firewall, Firewall):
                    continue
                try:
                    for zone in Zone.refreshall(firewall):
                        if not zone.interface:
                            continue
                        self.zones.setdefault(firewall.serial, {})[zone.name] = zone
                except PanDeviceXapiError:
                    # Best-effort: skip firewalls that cannot be queried.
                    pass
        return self.zones

    def create_zone(self, name, firewall, ifaces):
        """Create a layer3 Zone on the firewall with the given serial.

        Args:
            name (str): Zone name.
            firewall (str): Firewall serial (key into the firewall cache).
            ifaces: Interface(s) to attach to the zone.

        Returns:
            Zone: The newly created zone.
        """
        zone = Zone(name=name, mode="layer3", interface=ifaces)
        self.get_firewall(firewall).add(zone)
        zone.create()
        # The original condition was inverted: it reset the per-firewall dict
        # when it already existed and KeyError'd when it did not.
        self.zones.setdefault(firewall, {})[zone.name] = zone
        return zone

    def update_zone(self, name, firewall, ifaces):
        """Update a Zone's interfaces in place."""
        zone = self.get_zone(firewall, name)
        zone.interface = ifaces
        zone.apply()
        self.zones[firewall][zone.name] = zone

    def delete_zone(self, name, firewall):
        """Delete a Zone and remove it from the cache."""
        self.zones[firewall].pop(name).delete()
PanoramaPolicy(BaseAPI): + """Policy Objects API SDK.""" + + policies = {} + _rulebase = {"PRE": PreRulebase, "POST": PostRulebase} + + def _delete_instance(self, name, location): + """Deletes an instance of an PreRulebase or PostRulebase.""" + obj = self.policies[name].pop(location) + obj.delete() + + def get_rulebase(self, location, pre_post): + """Returns a prefetched instance.""" + if location == self.pano: + location = self.pano + loc_name = "shared" + elif isinstance(location, str): + location = self.device_groups[location] + loc_name = location.name + elif isinstance(location, DeviceGroup): + loc_name = location.name + else: + raise ValueError("Invalid location provided") + return self.policies[loc_name][pre_post], loc_name + + def get_security_rule(self, rulebase, rulename): # pylint: disable=no-self-use + """Returns a rule by name from rulebase.""" + for rule in rulebase[0].children: + if isinstance(rule, SecurityRule) and rule.name == rulename: + return rule + raise ValueError("Unable to find SecurityRule.") + + ##################### + # (Pre/Post)Rulebase + ##################### + + def create_security_rule(self, location, pre_post, name, **kwargs): + """Create SecurityRule.""" + rulebase, loc_name = self.get_rulebase(location, pre_post) + rulebase = rulebase[0] + location.add(rulebase) + rule = SecurityRule(name, **kwargs) + rulebase.add(rule) + rule.create() + # self.policies[loc_name][pre_post] = rulebase.refreshall() + return rule + + def retrieve_security_rules(self): + """Returns a dictionary with the location at the parent key and rules as values.""" + self.policies = {dg: {} for dg in self.device_groups.keys()} + self.policies["shared"] = {} + self.policies["shared"]["PRE"] = PreRulebase.refreshall(self.pano) + self.policies["shared"]["POST"] = PostRulebase.refreshall(self.pano) + for name, dev_group in self.device_groups.items(): + self.policies[name]["PRE"] = PreRulebase.refreshall(dev_group) + self.policies[name]["POST"] = 
"""ServiceObject API."""
from nautobot_firewall_models.choices import IP_PROTOCOL_CHOICES
from panos.objects import ServiceGroup, ServiceObject
from panos.predefined import Predefined

from .base import BaseAPI


class PanoramaService(BaseAPI):
    """Service Objects API SDK."""

    def __init__(self, panorama, device_groups, job=None):
        """Init with a per-instance service cache.

        The original class-level ``services = {}`` was shared by every
        instance; a fresh dict per instance avoids cross-instance pollution.
        """
        super().__init__(panorama, device_groups, job=job)
        self.services = {}

    def _delete_instance(self, name):
        """Delete a ServiceGroup or ServiceObject and drop it from the cache.

        Cache entries are dicts of the form ``{"value": obj, ...}``; the
        original called ``.delete()`` on the dict itself, which always failed.
        """
        self.services.pop(name)["value"].delete()

    def get(self, name):
        """Return a prefetched ServiceGroup/ServiceObject by name."""
        return self.services[name]["value"]

    def find_proper_protocol(self, desired_protocol):  # pylint: disable=no-self-use
        """Return the Nautobot-formatted protocol matching ``desired_protocol``.

        Returns None when no choice matches (made explicit instead of the
        original implicit fall-through).
        """
        for protocol in IP_PROTOCOL_CHOICES:
            if protocol[0].lower() == desired_protocol.lower():
                return protocol[0]
        return None

    #####################
    # ServiceGroup
    #####################

    def create_service_group(self, name, svc_objs, location=None):
        """Create a ServiceGroup in ``location`` (shared when None)."""
        location = self._get_location(location)
        group = ServiceGroup(name, value=svc_objs)
        location.add(group)
        group.create()
        self.services[group.name] = {
            "value": group,
            "type": "group",
            "location": "shared" if location == self.pano else location.name,
        }
        return group

    def retrieve_service_groups(self):
        """Return all ServiceGroups."""
        self.services.update(self._get_all_via_device_groups(ServiceGroup, "group"))
        return self.services

    def update_service_group(self, name, svc_objs):
        """Update a single ServiceGroup's member objects."""
        group = self.get(name)
        group.value = svc_objs
        group.apply()
        self.services[name]["value"] = group
        return group

    def delete_service_group(self, name):
        """Delete a single ServiceGroup."""
        self._delete_instance(name)

    #####################
    # ServiceObject
    #####################

    def create_service_object(self, name, port, protocol, location=None):
        """Create a ServiceObject in ``location`` (shared when None)."""
        location = self._get_location(location)
        svc = ServiceObject(
            name,
            protocol=protocol.lower(),
            destination_port=port,
        )
        location.add(svc)
        svc.create()
        self.services[svc.name] = {
            "value": svc,
            "type": "object",
            "location": "shared" if location == self.pano else location.name,
        }
        return svc

    def retrieve_service_objects(self):
        """Return all ServiceObjects, including Panorama's predefined ones."""
        predefined = Predefined(self.pano)
        predefined.refreshall_services()
        self.services.update(
            {
                name: {"value": svc, "type": "object", "location": "predefined"}
                for name, svc in predefined.service_objects.items()
            }
        )
        self.services.update(self._get_all_via_device_groups(ServiceObject, "object"))
        return self.services

    def update_service_object(self, name, port=None, protocol=None):
        """Update a single ServiceObject's port and/or protocol.

        Raises:
            ValueError: When attempting to modify a predefined service.
        """
        if self.services[name]["location"] == "predefined":
            raise ValueError("Unable to update predefined service.")
        svc = self.get(name)
        if port:
            svc.destination_port = port
        if protocol:
            svc.protocol = protocol
        svc.apply()
        self.services[name]["value"] = svc
        return svc

    def delete_service_object(self, name):
        """Delete a single ServiceObject.

        Raises:
            ValueError: When attempting to delete a predefined service.
        """
        if self.services[name]["location"] == "predefined":
            # Message corrected: the original said "application" (copy-paste
            # from the application module).
            raise ValueError("Unable to delete predefined service")
        self._delete_instance(name)
update predefined service.") + svc = self.get(name) + if port: + svc.destination_port = port + if protocol: + svc.protocol = protocol + svc.apply() + self.services[name]["value"] = svc + return svc + + def delete_service_object(self, name): + """Deletes a single ServiceObject.""" + if self.services[name]["location"] == "predefined": + raise ValueError("Unable to delete predefined application") + self._delete_instance(name) diff --git a/webinars/panorama-ssot/nautobot_ssot_panorama/utils/panorama/user.py b/webinars/panorama-ssot/nautobot_ssot_panorama/utils/panorama/user.py new file mode 100644 index 0000000..54a74e9 --- /dev/null +++ b/webinars/panorama-ssot/nautobot_ssot_panorama/utils/panorama/user.py @@ -0,0 +1,45 @@ +"""DynamicUserGroup API.""" +from panos.objects import DynamicUserGroup + +from .base import BaseAPI + + +class PanoramaUser(BaseAPI): + """DynamicUserGroup Objects API SDK.""" + + users = {} + + ##################### + # DynamicUserGroup + ##################### + + def get(self, name): + """Returns a prefetched instance.""" + return self.users[name]["value"] + + def create_dynamic_user_group(self, name, location=None): + """Create DynamicUserGroup.""" + location = self._get_location(location) + group = DynamicUserGroup(name) + location.add(group) + group.create() + self.users[group.name] = { + "value": group, + "type": "group", + "location": "shared" if location == self.pano else location.name, + } + return group + + def retrieve_dynamic_user_groups(self): + """Returns all DynamicUserGroup.""" + self.users.update(self._get_all_via_device_groups(DynamicUserGroup, "group")) + return self.users + + def update_dynamic_user_group(self): + """Not implemented.""" + raise NotImplementedError("Not implemented.") + + def delete_dynamic_user_group(self, name): + """Deletes an instance of an DynamicUserGroup.""" + obj = self.users.pop(name) + obj.delete() diff --git a/webinars/panorama-ssot/nautobot_ssot_panorama/views.py 
"""Plugin UI Views."""
from nautobot.core.views import generic, mixins
from nautobot.dcim.models import Device

from nautobot_ssot_panorama.models import VirtualSystem, LogicalGroup, ControlPlaneSystem
from nautobot_ssot_panorama import filters, forms, tables
from nautobot_ssot_panorama.api.serializers import (
    VirtualSystemSerializer,
    LogicalGroupSerializer,
    ControlPlaneSystemSerializer,
)


class ControlPlaneSystemUIViewSet(
    mixins.ObjectDetailViewMixin,
    mixins.ObjectListViewMixin,
    mixins.ObjectEditViewMixin,
    mixins.ObjectDestroyViewMixin,
    mixins.ObjectBulkDestroyViewMixin,
):
    """UI view set providing list/detail/edit/delete for ControlPlaneSystem."""

    queryset = ControlPlaneSystem.objects.all()
    lookup_field = "pk"
    action_buttons = ("add",)

    form_class = forms.ControlPlaneSystemForm
    filterset_class = filters.ControlPlaneSystemFilterSet
    filterset_form_class = forms.ControlPlaneSystemFilterForm
    serializer_class = ControlPlaneSystemSerializer
    table_class = tables.ControlPlaneSystemTable

    def _process_bulk_create_form(self, form):
        """CSV bulk import is intentionally unsupported for this model."""
        raise NotImplementedError()


class VirtualSystemUIViewSet(
    mixins.ObjectDetailViewMixin,
    mixins.ObjectListViewMixin,
    mixins.ObjectEditViewMixin,
    mixins.ObjectDestroyViewMixin,
    mixins.ObjectBulkDestroyViewMixin,
):
    """UI view set providing list/detail/edit/delete for VirtualSystem."""

    queryset = VirtualSystem.objects.all()
    lookup_field = "pk"
    action_buttons = ("add",)

    form_class = forms.VirtualSystemForm
    filterset_class = filters.VirtualSystemFilterSet
    filterset_form_class = forms.VirtualSystemFilterForm
    serializer_class = VirtualSystemSerializer
    table_class = tables.VirtualSystemTable

    def _process_bulk_create_form(self, form):
        """CSV bulk import is intentionally unsupported for this model."""
        raise NotImplementedError()


class LogicalGroupUIViewSet(
    mixins.ObjectDetailViewMixin,
    mixins.ObjectListViewMixin,
    mixins.ObjectEditViewMixin,
    mixins.ObjectDestroyViewMixin,
    mixins.ObjectBulkDestroyViewMixin,
):
    """UI view set providing list/detail/edit/delete for LogicalGroup."""

    queryset = LogicalGroup.objects.all()
    lookup_field = "pk"
    action_buttons = ("add",)

    form_class = forms.LogicalGroupForm
    filterset_class = filters.LogicalGroupFilterSet
    filterset_form_class = forms.LogicalGroupFilterForm
    serializer_class = LogicalGroupSerializer
    table_class = tables.LogicalGroupTable

    def _process_bulk_create_form(self, form):
        """CSV bulk import is intentionally unsupported for this model."""
        raise NotImplementedError()


class DeviceVirtualSystemTabView(generic.ObjectView):
    """Extra Device-detail tab listing related VirtualSystems."""

    queryset = Device.objects.all()
    template_name = "nautobot_ssot_panorama/device_virtual_systems.html"


class DeviceLogicalGroupTabView(generic.ObjectView):
    """Extra Device-detail tab listing related LogicalGroups."""

    queryset = Device.objects.all()
    template_name = "nautobot_ssot_panorama/device_logical_groups.html"
+category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "appnope" +version = "0.1.3" +description = "Disable App Nap on macOS >= 10.9" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "asgiref" +version = "3.5.2" +description = "ASGI specs, helper code, and adapters" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +typing-extensions = {version = "*", markers = "python_version < \"3.8\""} + +[package.extras] +tests = ["pytest", "pytest-asyncio", "mypy (>=0.800)"] + +[[package]] +name = "astroid" +version = "2.11.7" +description = "An abstract syntax tree for Python with inference support." +category = "dev" +optional = false +python-versions = ">=3.6.2" + +[package.dependencies] +lazy-object-proxy = ">=1.4.0" +typed-ast = {version = ">=1.4.0,<2.0", markers = "implementation_name == \"cpython\" and python_version < \"3.8\""} +typing-extensions = {version = ">=3.10", markers = "python_version < \"3.10\""} +wrapt = ">=1.11,<2" + +[[package]] +name = "async-timeout" +version = "4.0.2" +description = "Timeout context manager for asyncio programs" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +typing-extensions = {version = ">=3.6.5", markers = "python_version < \"3.8\""} + +[[package]] +name = "attrs" +version = "22.1.0" +description = "Classes Without Boilerplate" +category = "main" +optional = false +python-versions = ">=3.5" + +[package.extras] +dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit", "cloudpickle"] +docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"] +tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins", "zope.interface", "cloudpickle"] +tests_no_zope = ["coverage[toml] 
(>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins", "cloudpickle"] + +[[package]] +name = "backcall" +version = "0.2.0" +description = "Specifications for callback functions passed in to an API" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "bandit" +version = "1.7.4" +description = "Security oriented static analyser for python code." +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +colorama = {version = ">=0.3.9", markers = "platform_system == \"Windows\""} +GitPython = ">=1.0.1" +PyYAML = ">=5.3.1" +stevedore = ">=1.20.0" + +[package.extras] +test = ["coverage (>=4.5.4)", "fixtures (>=3.0.0)", "flake8 (>=4.0.0)", "stestr (>=2.5.0)", "testscenarios (>=0.5.0)", "testtools (>=2.3.0)", "toml", "beautifulsoup4 (>=4.8.0)", "pylint (==1.9.4)"] +toml = ["toml"] +yaml = ["pyyaml"] + +[[package]] +name = "bcrypt" +version = "4.0.1" +description = "Modern password hashing for your software and your servers" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.extras] +tests = ["pytest (>=3.2.1,!=3.3.0)"] +typecheck = ["mypy"] + +[[package]] +name = "billiard" +version = "3.6.4.0" +description = "Python multiprocessing fork with improvements and bugfixes" +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "black" +version = "22.12.0" +description = "The uncompromising code formatter." 
+category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +click = ">=8.0.0" +mypy-extensions = ">=0.4.3" +pathspec = ">=0.9.0" +platformdirs = ">=2" +tomli = {version = ">=1.1.0", markers = "python_full_version < \"3.11.0a7\""} +typed-ast = {version = ">=1.4.2", markers = "python_version < \"3.8\" and implementation_name == \"cpython\""} +typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""} + +[package.extras] +colorama = ["colorama (>=0.4.3)"] +d = ["aiohttp (>=3.7.4)"] +jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] +uvloop = ["uvloop (>=0.15.2)"] + +[[package]] +name = "cached-property" +version = "1.5.2" +description = "A decorator for caching properties in classes." +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "capirca" +version = "2.0.6" +description = "Capirca" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +absl-py = "*" +ipaddress = ">=1.0.22" +mock = "*" +ply = "*" +PyYAML = "*" +six = "*" + +[[package]] +name = "celery" +version = "5.2.7" +description = "Distributed Task Queue." 
+category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +billiard = ">=3.6.4.0,<4.0" +click = ">=8.0.3,<9.0" +click-didyoumean = ">=0.0.3" +click-plugins = ">=1.1.1" +click-repl = ">=0.2.0" +importlib-metadata = {version = ">=1.4.0", markers = "python_version < \"3.8\""} +kombu = ">=5.2.3,<6.0" +pytz = ">=2021.3" +vine = ">=5.0.0,<6.0" + +[package.extras] +arangodb = ["pyArango (>=1.3.2)"] +auth = ["cryptography"] +azureblockblob = ["azure-storage-blob (==12.9.0)"] +brotli = ["brotli (>=1.0.0)", "brotlipy (>=0.7.0)"] +cassandra = ["cassandra-driver (<3.21.0)"] +consul = ["python-consul2"] +cosmosdbsql = ["pydocumentdb (==2.3.2)"] +couchbase = ["couchbase (>=3.0.0)"] +couchdb = ["pycouchdb"] +django = ["Django (>=1.11)"] +dynamodb = ["boto3 (>=1.9.178)"] +elasticsearch = ["elasticsearch"] +eventlet = ["eventlet (>=0.32.0)"] +gevent = ["gevent (>=1.5.0)"] +librabbitmq = ["librabbitmq (>=1.5.0)"] +memcache = ["pylibmc"] +mongodb = ["pymongo[srv] (>=3.11.1)"] +msgpack = ["msgpack"] +pymemcache = ["python-memcached"] +pyro = ["pyro4"] +pytest = ["pytest-celery"] +redis = ["redis (>=3.4.1,!=4.0.0,!=4.0.1)"] +s3 = ["boto3 (>=1.9.125)"] +slmq = ["softlayer-messaging (>=1.0.3)"] +solar = ["ephem"] +sqlalchemy = ["sqlalchemy"] +sqs = ["kombu"] +tblib = ["tblib (>=1.3.0)", "tblib (>=1.5.0)"] +yaml = ["PyYAML (>=3.10)"] +zookeeper = ["kazoo (>=1.3.1)"] +zstd = ["zstandard"] + +[[package]] +name = "certifi" +version = "2022.12.7" +description = "Python package for providing Mozilla's CA Bundle." +category = "main" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "cffi" +version = "1.15.1" +description = "Foreign Function Interface for Python calling C code." +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +pycparser = "*" + +[[package]] +name = "charset-normalizer" +version = "2.1.1" +description = "The Real First Universal Charset Detector. 
Open, modern and actively maintained alternative to Chardet." +category = "main" +optional = false +python-versions = ">=3.6.0" + +[package.extras] +unicode_backport = ["unicodedata2"] + +[[package]] +name = "click" +version = "8.1.3" +description = "Composable command line interface toolkit" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} +importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} + +[[package]] +name = "click-didyoumean" +version = "0.3.0" +description = "Enables git-like *did-you-mean* feature in click" +category = "main" +optional = false +python-versions = ">=3.6.2,<4.0.0" + +[package.dependencies] +click = ">=7" + +[[package]] +name = "click-plugins" +version = "1.1.1" +description = "An extension module for click to enable registering CLI commands via setuptools entry-points." +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +click = ">=4.0" + +[package.extras] +dev = ["pytest (>=3.6)", "pytest-cov", "wheel", "coveralls"] + +[[package]] +name = "click-repl" +version = "0.2.0" +description = "REPL plugin for Click" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +click = "*" +prompt-toolkit = "*" +six = "*" + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +category = "main" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" + +[[package]] +name = "coreapi" +version = "2.3.3" +description = "Python client library for Core API." +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +coreschema = "*" +itypes = "*" +requests = "*" +uritemplate = "*" + +[[package]] +name = "coreschema" +version = "0.0.4" +description = "Core Schema." 
+category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +jinja2 = "*" + +[[package]] +name = "coverage" +version = "6.5.0" +description = "Code coverage measurement for Python" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.extras] +toml = ["tomli"] + +[[package]] +name = "cryptography" +version = "38.0.4" +description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +cffi = ">=1.12" + +[package.extras] +docs = ["sphinx (>=1.6.5,!=1.8.0,!=3.1.0,!=3.1.1)", "sphinx-rtd-theme"] +docstest = ["pyenchant (>=1.6.11)", "twine (>=1.12.0)", "sphinxcontrib-spelling (>=4.0.1)"] +pep8test = ["black", "flake8", "flake8-import-order", "pep8-naming"] +sdist = ["setuptools-rust (>=0.11.4)"] +ssh = ["bcrypt (>=3.1.5)"] +test = ["pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-subtests", "pytest-xdist", "pretend", "iso8601", "pytz", "hypothesis (>=1.11.4,!=3.79.2)"] + +[[package]] +name = "cycler" +version = "0.11.0" +description = "Composable style cycles" +category = "main" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "decorator" +version = "5.1.1" +description = "Decorators for Humans" +category = "dev" +optional = false +python-versions = ">=3.5" + +[[package]] +name = "deepdiff" +version = "5.8.1" +description = "Deep Difference and Search of any Python object/data." 
+category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +ordered-set = ">=4.1.0,<4.2.0" + +[package.extras] +cli = ["click (==8.0.3)", "pyyaml (==5.4.1)", "toml (==0.10.2)", "clevercsv (==0.7.1)"] + +[[package]] +name = "defusedxml" +version = "0.7.1" +description = "XML bomb protection for Python stdlib modules" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[[package]] +name = "diffsync" +version = "1.7.0" +description = "Library to easily sync/diff/update 2 different data sources" +category = "main" +optional = false +python-versions = ">=3.7,<4.0" + +[package.dependencies] +colorama = ">=0.4.3,<0.5.0" +packaging = ">=21.3,<22.0" +pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0" +structlog = ">=20.1.0,<22.0.0" + +[package.extras] +redis = ["redis (>=4.3,<5.0)"] + +[[package]] +name = "dill" +version = "0.3.6" +description = "serialize all of python" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.extras] +graph = ["objgraph (>=1.7.2)"] + +[[package]] +name = "django" +version = "3.2.16" +description = "A high-level Python Web framework that encourages rapid development and clean, pragmatic design." +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +asgiref = ">=3.3.2,<4" +pytz = "*" +sqlparse = ">=0.2.2" + +[package.extras] +argon2 = ["argon2-cffi (>=19.1.0)"] +bcrypt = ["bcrypt"] + +[[package]] +name = "django-ajax-tables" +version = "1.1.1" +description = "Django tag for ajax-enabled tables" +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "django-appconf" +version = "1.0.5" +description = "A helper class for handling configuration defaults of packaged apps gracefully." 
+category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +django = "*" + +[[package]] +name = "django-cacheops" +version = "6.0" +description = "A slick ORM cache with automatic granular event-driven invalidation for Django." +category = "main" +optional = false +python-versions = ">=3.5" + +[package.dependencies] +django = ">=2.1" +funcy = ">=1.8,<2.0" +redis = ">=3.0.0" +six = ">=1.4.0" + +[[package]] +name = "django-celery-beat" +version = "2.2.1" +description = "Database-backed Periodic Tasks." +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +celery = ">=5.0,<6.0" +Django = ">=2.2,<4.0" +django-timezone-field = ">=4.1.0,<5.0" +python-crontab = ">=2.3.4" + +[[package]] +name = "django-constance" +version = "2.9.1" +description = "Django live settings with pluggable backends, including Redis." +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +django-picklefield = {version = "*", optional = true, markers = "extra == \"database\""} + +[package.extras] +database = ["django-picklefield"] +redis = ["redis"] + +[[package]] +name = "django-cors-headers" +version = "3.13.0" +description = "django-cors-headers is a Django application for handling the server headers required for Cross-Origin Resource Sharing (CORS)." +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +Django = ">=3.2" + +[[package]] +name = "django-cryptography" +version = "1.0" +description = "Easily encrypt data in Django" +category = "main" +optional = false +python-versions = ">=3.5" + +[package.dependencies] +cryptography = "*" +django-appconf = "*" + +[[package]] +name = "django-db-file-storage" +version = "0.5.5" +description = "Custom FILE_STORAGE for Django. Saves files in your database instead of your file system." 
+category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +Django = "*" + +[[package]] +name = "django-debug-toolbar" +version = "3.8.1" +description = "A configurable set of panels that display various debug information about the current request/response." +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +django = ">=3.2.4" +sqlparse = ">=0.2" + +[[package]] +name = "django-extensions" +version = "3.2.1" +description = "Extensions for Django" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +Django = ">=3.2" + +[[package]] +name = "django-filter" +version = "21.1" +description = "Django-filter is a reusable Django application for allowing users to filter querysets dynamically." +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +Django = ">=2.2" + +[[package]] +name = "django-health-check" +version = "3.16.5" +description = "Run checks on services like databases, queue servers, celery processes, etc." +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +django = ">=2.2" + +[[package]] +name = "django-jinja" +version = "2.10.2" +description = "Jinja2 templating language integrated in Django." +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +django = ">=2.2" +jinja2 = ">=3" + +[[package]] +name = "django-js-asset" +version = "2.0.0" +description = "script tag with additional attributes for django.forms.Media" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +Django = ">=2.2" + +[package.extras] +tests = ["coverage"] + +[[package]] +name = "django-mptt" +version = "0.14.0" +description = "Utilities for implementing Modified Preorder Tree Traversal with your Django Models and working with trees of Model instances." 
+category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +django-js-asset = "*" + +[package.extras] +tests = ["coverage", "mock-django"] + +[[package]] +name = "django-picklefield" +version = "3.1" +description = "Pickled object field for Django" +category = "main" +optional = false +python-versions = ">=3" + +[package.dependencies] +Django = ">=3.2" + +[package.extras] +tests = ["tox"] + +[[package]] +name = "django-pivot" +version = "1.9.0" +description = "Create pivot tables and histograms from ORM querysets" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +django = ">=2.2.0" + +[[package]] +name = "django-prometheus" +version = "2.2.0" +description = "Django middlewares to monitor your application with Prometheus.io." +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +prometheus-client = ">=0.7" + +[[package]] +name = "django-redis" +version = "5.2.0" +description = "Full featured redis cache backend for Django." 
+category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +Django = ">=2.2" +redis = ">=3,<4.0.0 || >4.0.0,<4.0.1 || >4.0.1" + +[package.extras] +hiredis = ["redis[hiredis] (>=3,!=4.0.0,!=4.0.1)"] + +[[package]] +name = "django-rq" +version = "2.5.1" +description = "An app that provides django integration for RQ (Redis Queue)" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +django = ">=2.0" +redis = ">=3" +rq = ">=1.2" + +[package.extras] +sentry = ["raven (>=6.1.0)"] +testing = ["mock (>=2.0.0)"] + +[[package]] +name = "django-tables2" +version = "2.4.1" +description = "Table/data-grid framework for Django" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +Django = ">=1.11" + +[package.extras] +tablib = ["tablib"] + +[[package]] +name = "django-taggit" +version = "3.0.0" +description = "django-taggit is a reusable Django application for simple tagging." +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +Django = ">=3.2" + +[[package]] +name = "django-timezone-field" +version = "4.1.2" +description = "A Django app providing database and form fields for pytz timezone objects." 
+category = "main" +optional = false +python-versions = ">=3.5" + +[package.dependencies] +django = ">=2.2" +pytz = "*" + +[package.extras] +rest_framework = ["djangorestframework (>=3.0.0)"] + +[[package]] +name = "django-tree-queries" +version = "0.11.0" +description = "Tree queries with explicit opt-in, without configurability" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.extras] +tests = ["coverage"] + +[[package]] +name = "django-webserver" +version = "1.2.0" +description = "Django management commands for production webservers" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +Django = "*" + +[package.extras] +gunicorn = ["gunicorn"] +pyuwsgi = ["pyuwsgi"] +test = ["pytest", "mock"] +uvicorn = ["uvicorn (>0.6)"] +waitress = ["waitress"] + +[[package]] +name = "djangorestframework" +version = "3.14.0" +description = "Web APIs for Django, made easy." +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +django = ">=3.0" +pytz = "*" + +[[package]] +name = "drf-spectacular" +version = "0.24.2" +description = "Sane and flexible OpenAPI 3 schema generation for Django REST framework" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +Django = ">=2.2" +djangorestframework = ">=3.10" +drf-spectacular-sidecar = {version = "*", optional = true, markers = "extra == \"sidecar\""} +inflection = ">=0.3.1" +jsonschema = ">=2.6.0" +PyYAML = ">=5.1" +typing-extensions = {version = "*", markers = "python_version < \"3.8\""} +uritemplate = ">=2.0.0" + +[package.extras] +offline = ["drf-spectacular-sidecar"] +sidecar = ["drf-spectacular-sidecar"] + +[[package]] +name = "drf-spectacular-sidecar" +version = "2022.12.1" +description = "Serve self-contained distribution builds of Swagger UI and Redoc with Django" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +Django = ">=2.2" + +[[package]] +name = 
"drf-yasg" +version = "1.21.4" +description = "Automated generation of real Swagger/OpenAPI 2.0 schemas from Django Rest Framework code." +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +coreapi = ">=2.3.3" +coreschema = ">=0.0.4" +django = ">=2.2.16" +djangorestframework = ">=3.10.3" +inflection = ">=0.3.1" +packaging = ">=21.0" +pytz = ">=2021.1" +"ruamel.yaml" = ">=0.16.13" +swagger-spec-validator = {version = ">=2.1.0", optional = true, markers = "extra == \"validation\""} +uritemplate = ">=3.0.0" + +[package.extras] +validation = ["swagger-spec-validator (>=2.1.0)"] + +[[package]] +name = "exceptiongroup" +version = "1.0.4" +description = "Backport of PEP 654 (exception groups)" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "flake8" +version = "3.9.2" +description = "the modular source code checker: pep8 pyflakes and co" +category = "dev" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" + +[package.dependencies] +importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} +mccabe = ">=0.6.0,<0.7.0" +pycodestyle = ">=2.7.0,<2.8.0" +pyflakes = ">=2.3.0,<2.4.0" + +[[package]] +name = "fonttools" +version = "4.38.0" +description = "Tools to manipulate font files" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.extras] +all = ["fs (>=2.2.0,<3)", "lxml (>=4.0,<5)", "zopfli (>=0.1.4)", "lz4 (>=1.7.4.2)", "matplotlib", "sympy", "skia-pathops (>=0.5.0)", "uharfbuzz (>=0.23.0)", "brotlicffi (>=0.8.0)", "scipy", "brotli (>=1.0.1)", "munkres", "unicodedata2 (>=14.0.0)", "xattr"] +graphite = ["lz4 (>=1.7.4.2)"] +interpolatable = ["scipy", "munkres"] +lxml = ["lxml (>=4.0,<5)"] +pathops = ["skia-pathops (>=0.5.0)"] +plot = ["matplotlib"] +repacker = ["uharfbuzz (>=0.23.0)"] +symfont = ["sympy"] +type1 = ["xattr"] +ufo = ["fs (>=2.2.0,<3)"] +unicode = ["unicodedata2 
(>=14.0.0)"] +woff = ["zopfli (>=0.1.4)", "brotlicffi (>=0.8.0)", "brotli (>=1.0.1)"] + +[[package]] +name = "funcy" +version = "1.17" +description = "A fancy and practical functional tools" +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "future" +version = "0.18.2" +description = "Clean single-source support for Python 3 and 2" +category = "main" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" + +[[package]] +name = "ghp-import" +version = "2.1.0" +description = "Copy your docs directly to the gh-pages branch." +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +python-dateutil = ">=2.8.1" + +[package.extras] +dev = ["twine", "markdown", "flake8", "wheel"] + +[[package]] +name = "gitdb" +version = "4.0.10" +description = "Git Object Database" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +smmap = ">=3.0.1,<6" + +[[package]] +name = "gitpython" +version = "3.1.29" +description = "GitPython is a python library used to interact with Git repositories" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +gitdb = ">=4.0.1,<5" +typing-extensions = {version = ">=3.7.4.3", markers = "python_version < \"3.8\""} + +[[package]] +name = "graphene" +version = "2.1.9" +description = "GraphQL Framework for Python" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +aniso8601 = ">=3,<=7" +graphql-core = ">=2.1,<3" +graphql-relay = ">=2,<3" +six = ">=1.10.0,<2" + +[package.extras] +django = ["graphene-django"] +sqlalchemy = ["graphene-sqlalchemy"] +test = ["pytest", "pytest-benchmark", "pytest-cov", "pytest-mock", "fastdiff (==0.2.0)", "snapshottest", "coveralls", "promise", "six", "mock", "pytz", "iso8601"] + +[[package]] +name = "graphene-django" +version = "2.15.0" +description = "Graphene Django integration" +category = "main" +optional = false +python-versions = "*" + 
+[package.dependencies] +Django = ">=1.11" +graphene = ">=2.1.7,<3" +graphql-core = ">=2.1.0,<3" +promise = ">=2.1" +singledispatch = ">=3.4.0.3" +six = ">=1.10.0" +text-unidecode = "*" + +[package.extras] +dev = ["black (==19.10b0)", "flake8 (==3.7.9)", "flake8-black (==0.1.1)", "flake8-bugbear (==20.1.4)", "pytest (>=3.6.3)", "pytest-cov", "coveralls", "mock", "pytz", "pytest-django (>=3.3.2)", "djangorestframework (>=3.6.3)", "django-filter (<2)", "django-filter (>=2)"] +rest_framework = ["djangorestframework (>=3.6.3)"] +test = ["pytest (>=3.6.3)", "pytest-cov", "coveralls", "mock", "pytz", "pytest-django (>=3.3.2)", "djangorestframework (>=3.6.3)", "django-filter (<2)", "django-filter (>=2)"] + +[[package]] +name = "graphene-django-optimizer" +version = "0.8.0" +description = "Optimize database access inside graphene queries." +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "graphql-core" +version = "2.3.2" +description = "GraphQL implementation for Python" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +promise = ">=2.3,<3" +rx = ">=1.6,<2" +six = ">=1.10.0" + +[package.extras] +gevent = ["gevent (>=1.1)"] +test = ["six (==1.14.0)", "pyannotate (==1.2.0)", "pytest (==4.6.10)", "pytest-django (==3.9.0)", "pytest-cov (==2.8.1)", "coveralls (==1.11.1)", "cython (==0.29.17)", "gevent (==1.5.0)", "pytest-benchmark (==3.2.3)", "pytest-mock (==2.0.0)"] + +[[package]] +name = "graphql-relay" +version = "2.0.1" +description = "Relay implementation for Python" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +graphql-core = ">=2.2,<3" +promise = ">=2.2,<3" +six = ">=1.12" + +[[package]] +name = "idna" +version = "3.4" +description = "Internationalized Domain Names in Applications (IDNA)" +category = "main" +optional = false +python-versions = ">=3.5" + +[[package]] +name = "importlib-metadata" +version = "4.13.0" +description = "Read metadata from Python 
packages" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""} +zipp = ">=0.5" + +[package.extras] +docs = ["sphinx (>=3.5)", "jaraco.packaging (>=9)", "rst.linker (>=1.9)", "furo", "jaraco.tidelift (>=1.4)"] +perf = ["ipython"] +testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "flake8 (<5)", "pytest-cov", "pytest-enabler (>=1.3)", "packaging", "pyfakefs", "flufl.flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)", "importlib-resources (>=1.3)"] + +[[package]] +name = "importlib-resources" +version = "5.10.1" +description = "Read resources from Python packages" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} + +[package.extras] +docs = ["sphinx (>=3.5)", "jaraco.packaging (>=9)", "rst.linker (>=1.9)", "furo", "jaraco.tidelift (>=1.4)"] +testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "flake8 (<5)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)", "pytest-flake8"] + +[[package]] +name = "inflection" +version = "0.5.1" +description = "A port of Ruby on Rails inflector to Python" +category = "main" +optional = false +python-versions = ">=3.5" + +[[package]] +name = "iniconfig" +version = "1.1.1" +description = "iniconfig: brain-dead simple config-ini parsing" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "invoke" +version = "1.7.3" +description = "Pythonic task execution" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "ipaddress" +version = "1.0.23" +description = "IPv4/IPv6 manipulation library" +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "ipython" +version = "7.34.0" +description = "IPython: Productive Interactive Computing" 
+category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +appnope = {version = "*", markers = "sys_platform == \"darwin\""} +backcall = "*" +colorama = {version = "*", markers = "sys_platform == \"win32\""} +decorator = "*" +jedi = ">=0.16" +matplotlib-inline = "*" +pexpect = {version = ">4.3", markers = "sys_platform != \"win32\""} +pickleshare = "*" +prompt-toolkit = ">=2.0.0,<3.0.0 || >3.0.0,<3.0.1 || >3.0.1,<3.1.0" +pygments = "*" +traitlets = ">=4.2" + +[package.extras] +all = ["Sphinx (>=1.3)", "ipykernel", "ipyparallel", "ipywidgets", "nbconvert", "nbformat", "nose (>=0.10.1)", "notebook", "numpy (>=1.17)", "pygments", "qtconsole", "requests", "testpath"] +doc = ["Sphinx (>=1.3)"] +kernel = ["ipykernel"] +nbconvert = ["nbconvert"] +nbformat = ["nbformat"] +notebook = ["notebook", "ipywidgets"] +parallel = ["ipyparallel"] +qtconsole = ["qtconsole"] +test = ["nose (>=0.10.1)", "requests", "testpath", "pygments", "nbformat", "ipykernel", "numpy (>=1.17)"] + +[[package]] +name = "isort" +version = "5.11.2" +description = "A Python utility / library to sort Python imports." +category = "dev" +optional = false +python-versions = ">=3.7.0" + +[package.extras] +pipfile-deprecated-finder = ["pipreqs", "requirementslib"] +requirements-deprecated-finder = ["pipreqs", "pip-api"] +colors = ["colorama (>=0.4.3,<0.5.0)"] +plugins = ["setuptools"] + +[[package]] +name = "itypes" +version = "1.2.0" +description = "Simple immutable types for python." +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "jedi" +version = "0.18.2" +description = "An autocompletion tool for Python that can be used for text editors." 
+category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +parso = ">=0.8.0,<0.9.0" + +[package.extras] +docs = ["Jinja2 (==2.11.3)", "MarkupSafe (==1.1.1)", "Pygments (==2.8.1)", "alabaster (==0.7.12)", "babel (==2.9.1)", "chardet (==4.0.0)", "commonmark (==0.8.1)", "docutils (==0.17.1)", "future (==0.18.2)", "idna (==2.10)", "imagesize (==1.2.0)", "mock (==1.0.1)", "packaging (==20.9)", "pyparsing (==2.4.7)", "pytz (==2021.1)", "readthedocs-sphinx-ext (==2.1.4)", "recommonmark (==0.5.0)", "requests (==2.25.1)", "six (==1.15.0)", "snowballstemmer (==2.1.0)", "sphinx-rtd-theme (==0.4.3)", "sphinx (==1.8.5)", "sphinxcontrib-serializinghtml (==1.1.4)", "sphinxcontrib-websupport (==1.2.4)", "urllib3 (==1.26.4)"] +qa = ["flake8 (==3.8.3)", "mypy (==0.782)"] +testing = ["Django (<3.1)", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] + +[[package]] +name = "jinja2" +version = "3.1.2" +description = "A very fast and expressive template engine." +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "jsonschema" +version = "4.7.2" +description = "An implementation of JSON Schema validation for Python" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +attrs = ">=17.4.0" +importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} +importlib-resources = {version = ">=1.4.0", markers = "python_version < \"3.9\""} +pyrsistent = ">=0.14.0,<0.17.0 || >0.17.0,<0.17.1 || >0.17.1,<0.17.2 || >0.17.2" +typing-extensions = {version = "*", markers = "python_version < \"3.8\""} + +[package.extras] +format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] +format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", 
"webcolors (>=1.11)"] + +[[package]] +name = "junos-eznc" +version = "2.6.6" +description = "Junos 'EZ' automation for non-programmers" +category = "main" +optional = false +python-versions = ">=3.*, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[package.dependencies] +jinja2 = ">=2.7.1" +lxml = ">=3.2.4" +ncclient = "0.6.13" +netaddr = "*" +paramiko = ">=1.15.2" +pyparsing = "*" +pyserial = "*" +PyYAML = ">=5.1" +scp = ">=0.7.0" +six = "*" +transitions = "*" +yamlordereddictloader = "*" + +[[package]] +name = "kiwisolver" +version = "1.4.4" +description = "A fast implementation of the Cassowary constraint solver" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +typing-extensions = {version = "*", markers = "python_version < \"3.8\""} + +[[package]] +name = "kombu" +version = "5.2.4" +description = "Messaging library for Python." +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +amqp = ">=5.0.9,<6.0.0" +cached-property = {version = "*", markers = "python_version < \"3.8\""} +importlib-metadata = {version = ">=0.18", markers = "python_version < \"3.8\""} +vine = "*" + +[package.extras] +azureservicebus = ["azure-servicebus (>=7.0.0)"] +azurestoragequeues = ["azure-storage-queue"] +consul = ["python-consul (>=0.6.0)"] +librabbitmq = ["librabbitmq (>=2.0.0)"] +mongodb = ["pymongo (>=3.3.0,<3.12.1)"] +msgpack = ["msgpack"] +pyro = ["pyro4"] +qpid = ["qpid-python (>=0.26)", "qpid-tools (>=0.26)"] +redis = ["redis (>=3.4.1,!=4.0.0,!=4.0.1)"] +slmq = ["softlayer-messaging (>=1.0.3)"] +sqlalchemy = ["sqlalchemy"] +sqs = ["boto3 (>=1.9.12)", "pycurl (>=7.44.1,<7.45.0)", "urllib3 (>=1.26.7)"] +yaml = ["PyYAML (>=3.10)"] +zookeeper = ["kazoo (>=1.3.1)"] + +[[package]] +name = "lazy-object-proxy" +version = "1.8.0" +description = "A fast and thorough lazy object proxy." 
+category = "dev" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "lxml" +version = "4.9.2" +description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, != 3.4.*" + +[package.extras] +cssselect = ["cssselect (>=0.7)"] +html5 = ["html5lib"] +htmlsoup = ["beautifulsoup4"] +source = ["Cython (>=0.29.7)"] + +[[package]] +name = "markdown" +version = "3.3.7" +description = "Python implementation of Markdown." +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +importlib-metadata = {version = ">=4.4", markers = "python_version < \"3.10\""} + +[package.extras] +testing = ["coverage", "pyyaml"] + +[[package]] +name = "markupsafe" +version = "2.1.1" +description = "Safely add untrusted strings to HTML/XML markup." +category = "main" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "matplotlib" +version = "3.5.3" +description = "Python plotting package" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +cycler = ">=0.10" +fonttools = ">=4.22.0" +kiwisolver = ">=1.0.1" +numpy = ">=1.17" +packaging = ">=20.0" +pillow = ">=6.2.0" +pyparsing = ">=2.2.1" +python-dateutil = ">=2.7" +setuptools_scm = ">=4,<7" + +[[package]] +name = "matplotlib-inline" +version = "0.1.6" +description = "Inline Matplotlib backend for Jupyter" +category = "dev" +optional = false +python-versions = ">=3.5" + +[package.dependencies] +traitlets = "*" + +[[package]] +name = "mccabe" +version = "0.6.1" +description = "McCabe checker, plugin for flake8" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "mergedeep" +version = "1.3.4" +description = "A deep merge function for 🐍." 
+category = "dev" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "mkdocs" +version = "1.4.2" +description = "Project documentation with Markdown." +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +click = ">=7.0" +colorama = {version = ">=0.4", markers = "platform_system == \"Windows\""} +ghp-import = ">=1.0" +importlib-metadata = {version = ">=4.3", markers = "python_version < \"3.10\""} +jinja2 = ">=2.11.1" +markdown = ">=3.2.1,<3.4" +mergedeep = ">=1.3.4" +packaging = ">=20.5" +pyyaml = ">=5.1" +pyyaml-env-tag = ">=0.1" +typing-extensions = {version = ">=3.10", markers = "python_version < \"3.8\""} +watchdog = ">=2.0" + +[package.extras] +i18n = ["babel (>=2.9.0)"] +min-versions = ["babel (==2.9.0)", "click (==7.0)", "colorama (==0.4)", "ghp-import (==1.0)", "importlib-metadata (==4.3)", "jinja2 (==2.11.1)", "markdown (==3.2.1)", "markupsafe (==2.0.1)", "mergedeep (==1.3.4)", "packaging (==20.5)", "pyyaml-env-tag (==0.1)", "pyyaml (==5.1)", "typing-extensions (==3.10)", "watchdog (==2.0)"] + +[[package]] +name = "mock" +version = "4.0.3" +description = "Rolling backport of unittest.mock for all Pythons" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.extras] +build = ["twine", "wheel", "blurb"] +docs = ["sphinx"] +test = ["pytest (<5.4)", "pytest-cov"] + +[[package]] +name = "mypy-extensions" +version = "0.4.3" +description = "Experimental type system extensions for programs checked with the mypy typechecker." 
+category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "napalm" +version = "4.0.0" +description = "Network Automation and Programmability Abstraction Layer with Multivendor support" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +cffi = ">=1.11.3" +future = "*" +jinja2 = "*" +junos-eznc = ">=2.6.3" +lxml = ">=4.3.0" +ncclient = "*" +netaddr = "*" +netmiko = ">=4.0.0" +netutils = ">=1.0.0" +paramiko = ">=2.6.0" +pyeapi = ">=0.8.2" +pyYAML = "*" +requests = ">=2.7.0" +scp = "*" +textfsm = "<=1.1.2" +ttp = "*" +ttp-templates = "*" +typing-extensions = ">=4.3.0" + +[[package]] +name = "nautobot" +version = "1.5.5" +description = "Source of truth and network automation platform." +category = "main" +optional = false +python-versions = ">=3.7,<4.0" + +[package.dependencies] +celery = ">=5.2.7,<5.3.0" +Django = ">=3.2.16,<3.3.0" +django-ajax-tables = ">=1.1.1,<1.2.0" +django-cacheops = ">=6.0,<6.1" +django-celery-beat = ">=2.2.1,<2.3.0" +django-constance = {version = ">=2.9.0,<2.10.0", extras = ["database"]} +django-cors-headers = ">=3.13.0,<3.14.0" +django-cryptography = ">=1.0,<1.1" +django-db-file-storage = ">=0.5.5,<0.6.0" +django-extensions = ">=3.2.0,<3.3.0" +django-filter = ">=21.1,<21.2" +django-health-check = ">=3.16.5,<3.17.0" +django-jinja = ">=2.10.2,<2.11.0" +django-mptt = ">=0.14.0,<0.15.0" +django-prometheus = ">=2.2.0,<2.3.0" +django-redis = ">=5.2.0,<5.3.0" +django-rq = ">=2.5.1,<2.6.0" +django-tables2 = ">=2.4.1,<2.5.0" +django-taggit = ">=3.0.0,<3.1.0" +django-timezone-field = ">=4.1.2,<4.2.0" +django-tree-queries = ">=0.11,<0.12" +django-webserver = ">=1.2.0,<1.3.0" +djangorestframework = ">=3.14.0,<3.15.0" +drf-spectacular = {version = ">=0.24.2,<0.25.0", extras = ["sidecar"]} +drf-yasg = {version = ">=1.20.0,<2.0.0", extras = ["validation"]} +GitPython = ">=3.1.29,<3.2.0" +graphene-django = ">=2.15.0,<2.16.0" +graphene-django-optimizer = ">=0.8.0,<0.9.0" +importlib-metadata = {version 
= ">=4.4,<5.0", markers = "python_version < \"3.8\""} +Jinja2 = ">=3.1.0,<3.2.0" +jsonschema = ">=4.7.0,<4.8.0" +Markdown = ">=3.3.7,<3.4.0" +MarkupSafe = ">=2.1.1,<2.2.0" +netaddr = ">=0.8.0,<0.9.0" +netutils = ">=1.3.0,<1.4.0" +Pillow = ">=9.3.0,<9.4.0" +prometheus-client = ">=0.14.1,<0.15.0" +psycopg2-binary = ">=2.9.5,<2.10.0" +pycryptodome = ">=3.13.0,<3.14.0" +pyuwsgi = ">=2.0.21,<2.1.0" +PyYAML = ">=6.0,<6.1" +social-auth-app-django = ">=5.0.0,<5.1.0" +svgwrite = ">=1.4.2,<1.5.0" + +[package.extras] +all = ["django-auth-ldap (>=4.1.0,<4.2.0)", "django-storages (>=1.12.3,<1.13.0)", "mysqlclient (>=2.1.0,<2.2.0)", "napalm (>=3.4.1,<3.5.0)", "social-auth-core[openidconnect,saml] (>=4.3.0,<4.4.0)"] +ldap = ["django-auth-ldap (>=4.1.0,<4.2.0)"] +remote-storage = ["django-storages (>=1.12.3,<1.13.0)"] +mysql = ["mysqlclient (>=2.1.0,<2.2.0)"] +napalm = ["napalm (>=3.4.1,<3.5.0)"] +sso = ["social-auth-core[openidconnect,saml] (>=4.3.0,<4.4.0)"] + +[[package]] +name = "nautobot-firewall-models" +version = "1.2.0a2" +description = "Nautobot plugin to model firewall objects." +category = "main" +optional = false +python-versions = ">=3.7,<4.0" + +[package.dependencies] +capirca = ">=2.0.6,<3.0.0" +netutils = ">=1.0.0,<2.0.0" + +[package.extras] +nautobot = ["nautobot (>=1.4.0,<2.0.0)"] + +[[package]] +name = "nautobot-golden-config" +version = "1.3.0" +description = "A plugin for configuration on nautobot" +category = "main" +optional = false +python-versions = "^3.7" +develop = false + +[package.dependencies] +deepdiff = "^5.5.0" +django-pivot = "^1.8.1" +matplotlib = "^3.3.2" +nautobot = ">=1.4.0" +nautobot-plugin-nornir = ">=1.0.0" + +[package.source] +type = "git" +url = "https://github.com/whitej6/nautobot-plugin-golden-config.git" +reference = "jlw-json-fix" +resolved_reference = "2ca366afc7357140ec64db243197f4d67ca25d70" + +[[package]] +name = "nautobot-plugin-nornir" +version = "1.0.0" +description = "Nautobot Nornir plugin." 
+category = "main" +optional = false +python-versions = ">=3.6,<4.0" + +[package.dependencies] +nautobot = ">=1.2.0" +netutils = ">=1.0.0" +nornir-nautobot = ">=2.2.0,<3.0.0" + +[[package]] +name = "nautobot-ssot" +version = "1.2.0" +description = "Nautobot Single Source of Truth" +category = "main" +optional = false +python-versions = ">=3.7,<4.0" + +[package.dependencies] +diffsync = ">=1.6.0,<2.0.0" +Markdown = "!=3.3.5" +nautobot = "*" +packaging = ">=21.3,<22.0" + +[[package]] +name = "ncclient" +version = "0.6.13" +description = "Python library for NETCONF clients" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[package.dependencies] +lxml = ">=3.3.0" +paramiko = ">=1.15.0" +six = "*" + +[[package]] +name = "netaddr" +version = "0.8.0" +description = "A network address manipulation library for Python" +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "netmiko" +version = "4.1.2" +description = "Multi-vendor library to simplify legacy CLI connections to network devices" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +ntc-templates = ">=2.0.0" +paramiko = ">=2.7.2" +pyserial = "*" +pyyaml = ">=5.3" +scp = ">=0.13.3" +tenacity = "*" +textfsm = "1.1.2" + +[[package]] +name = "netutils" +version = "1.3.0" +description = "Common helper functions useful in network automation." 
+category = "main" +optional = false +python-versions = ">=3.7,<4.0" + +[package.extras] +optionals = ["napalm (>=4.0.0,<5.0.0)"] + +[[package]] +name = "nornir" +version = "3.3.0" +description = "Pluggable multi-threaded framework with inventory management to help operate collections of devices" +category = "main" +optional = false +python-versions = ">=3.7,<4.0" + +[package.dependencies] +importlib-metadata = {version = ">=4,<5", markers = "python_version < \"3.10\""} +mypy_extensions = ">=0.4.1,<0.5.0" +"ruamel.yaml" = ">=0.17" +typing_extensions = ">=4.1,<5.0" + +[package.extras] +docs = ["sphinx (>=4,<5)", "sphinx_rtd_theme (>=1.0,<2.0)", "sphinxcontrib-napoleon (>=0.7,<0.8)", "jupyter (>=1,<2)", "nbsphinx (>=0.8,<0.9)", "pygments (>=2,<3)", "sphinx-issues (>=3.0,<4.0)"] + +[[package]] +name = "nornir-jinja2" +version = "0.2.0" +description = "Jinja2 plugins for nornir" +category = "main" +optional = false +python-versions = ">=3.6,<4.0" + +[package.dependencies] +jinja2 = ">=2.11.2,<4" +nornir = ">=3,<4" + +[[package]] +name = "nornir-napalm" +version = "0.3.0" +description = "NAPALM's plugins for nornir" +category = "main" +optional = false +python-versions = ">=3.7,<4.0" + +[package.dependencies] +napalm = ">=4,<5" +nornir = ">=3,<4" + +[[package]] +name = "nornir-nautobot" +version = "2.3.0" +description = "Nornir Nautobot" +category = "main" +optional = false +python-versions = ">=3.7,<4.0" + +[package.dependencies] +netutils = ">=1,<2" +nornir = ">=3.0.0,<4.0.0" +nornir-jinja2 = ">=0,<1" +nornir-napalm = ">=0,<1" +nornir-netmiko = ">=0,<1" +nornir-utils = ">=0,<1" +pynautobot = ">=1.0.1,<2.0.0" +requests = ">=2.25.1,<3.0.0" + +[[package]] +name = "nornir-netmiko" +version = "0.2.0" +description = "Netmiko's plugins for Nornir" +category = "main" +optional = false +python-versions = ">=3.7,<4.0" + +[package.dependencies] +netmiko = ">=4.0.0,<5.0.0" +textfsm = "1.1.2" + +[[package]] +name = "nornir-utils" +version = "0.2.0" +description = "Collection of 
plugins and functions for nornir that don't require external dependencies" +category = "main" +optional = false +python-versions = ">=3.6.2,<4.0.0" + +[package.dependencies] +colorama = ">=0.4.3,<0.5.0" +nornir = ">=3,<4" + +[[package]] +name = "ntc-templates" +version = "3.1.0" +description = "TextFSM Templates for Network Devices, and Python wrapper for TextFSM's CliTable." +category = "main" +optional = false +python-versions = ">=3.6,<4.0" + +[package.dependencies] +textfsm = ">=1.1.0,<2.0.0" + +[[package]] +name = "numpy" +version = "1.21.1" +description = "NumPy is the fundamental package for array computing with Python." +category = "main" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "oauthlib" +version = "3.2.2" +description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.extras] +rsa = ["cryptography (>=3.0.0)"] +signals = ["blinker (>=1.4.0)"] +signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] + +[[package]] +name = "ordered-set" +version = "4.1.0" +description = "An OrderedSet is a custom MutableSet that remembers its order, so that every" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.extras] +dev = ["pytest", "black", "mypy"] + +[[package]] +name = "packaging" +version = "21.3" +description = "Core utilities for Python packages" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" + +[[package]] +name = "pan-os-python" +version = "1.7.3" +description = "Framework for interacting with Palo Alto Networks devices via API" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[package.dependencies] +pan-python = ">=0.17.0,<0.18.0" + +[[package]] +name = "pan-python" +version = "0.17.0" +description = "Multi-tool set for Palo Alto Networks 
PAN-OS, Panorama, WildFire and AutoFocus" +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "paramiko" +version = "2.12.0" +description = "SSH2 protocol library" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +bcrypt = ">=3.1.3" +cryptography = ">=2.5" +pynacl = ">=1.0.1" +six = "*" + +[package.extras] +all = ["pyasn1 (>=0.1.7)", "pynacl (>=1.0.1)", "bcrypt (>=3.1.3)", "invoke (>=1.3)", "gssapi (>=1.4.1)", "pywin32 (>=2.1.8)"] +ed25519 = ["pynacl (>=1.0.1)", "bcrypt (>=3.1.3)"] +gssapi = ["pyasn1 (>=0.1.7)", "gssapi (>=1.4.1)", "pywin32 (>=2.1.8)"] +invoke = ["invoke (>=1.3)"] + +[[package]] +name = "parso" +version = "0.8.3" +description = "A Python Parser" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.extras] +qa = ["flake8 (==3.8.3)", "mypy (==0.782)"] +testing = ["docopt", "pytest (<6.0.0)"] + +[[package]] +name = "pathspec" +version = "0.10.3" +description = "Utility library for gitignore style pattern matching of file paths." +category = "dev" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "pbr" +version = "5.11.0" +description = "Python Build Reasonableness" +category = "dev" +optional = false +python-versions = ">=2.6" + +[[package]] +name = "pexpect" +version = "4.8.0" +description = "Pexpect allows easy control of interactive console applications." 
+category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +ptyprocess = ">=0.5" + +[[package]] +name = "pickleshare" +version = "0.7.5" +description = "Tiny 'shelve'-like database with concurrency support" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "pillow" +version = "9.3.0" +description = "Python Imaging Library (Fork)" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.extras] +docs = ["furo", "olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-issues (>=3.0.1)", "sphinx-removed-in", "sphinxext-opengraph"] +tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"] + +[[package]] +name = "platformdirs" +version = "2.6.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.extras] +docs = ["furo (>=2022.9.29)", "proselint (>=0.13)", "sphinx-autodoc-typehints (>=1.19.4)", "sphinx (>=5.3)"] +test = ["appdirs (==1.4.4)", "pytest-cov (>=4)", "pytest-mock (>=3.10)", "pytest (>=7.2)"] + +[[package]] +name = "pluggy" +version = "1.0.0" +description = "plugin and hook calling mechanisms for python" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "ply" +version = "3.11" +description = "Python Lex & Yacc" +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "prometheus-client" +version = "0.14.1" +description = "Python client for the Prometheus monitoring system." 
+category = "main" +optional = false +python-versions = ">=3.6" + +[package.extras] +twisted = ["twisted"] + +[[package]] +name = "promise" +version = "2.3" +description = "Promises/A+ implementation for Python" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +six = "*" + +[package.extras] +test = ["pytest (>=2.7.3)", "pytest-cov", "coveralls", "futures", "pytest-benchmark", "mock"] + +[[package]] +name = "prompt-toolkit" +version = "3.0.36" +description = "Library for building powerful interactive command lines in Python" +category = "main" +optional = false +python-versions = ">=3.6.2" + +[package.dependencies] +wcwidth = "*" + +[[package]] +name = "psycopg2-binary" +version = "2.9.5" +description = "psycopg2 - Python-PostgreSQL Database Adapter" +category = "main" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "ptyprocess" +version = "0.7.0" +description = "Run a subprocess in a pseudo terminal" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "pycodestyle" +version = "2.7.0" +description = "Python style guide checker" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[[package]] +name = "pycparser" +version = "2.21" +description = "C parser in Python" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[[package]] +name = "pycryptodome" +version = "3.13.0" +description = "Cryptographic library for Python" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[[package]] +name = "pydantic" +version = "1.10.2" +description = "Data validation and settings management using python type hints" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +typing-extensions = ">=4.1.0" + +[package.extras] +dotenv = ["python-dotenv (>=0.10.4)"] +email = ["email-validator (>=1.0.3)"] + 
+[[package]] +name = "pydocstyle" +version = "6.1.1" +description = "Python docstring style checker" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +snowballstemmer = "*" + +[package.extras] +toml = ["toml"] + +[[package]] +name = "pyeapi" +version = "0.8.4" +description = "Python Client for eAPI" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +netaddr = "*" + +[package.extras] +dev = ["check-manifest", "pep8", "pyflakes", "twine"] +test = ["coverage", "mock"] + +[[package]] +name = "pyflakes" +version = "2.3.1" +description = "passive checker of Python programs" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[[package]] +name = "pygments" +version = "2.13.0" +description = "Pygments is a syntax highlighting package written in Python." +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.extras] +plugins = ["importlib-metadata"] + +[[package]] +name = "pyjwt" +version = "2.6.0" +description = "JSON Web Token implementation in Python" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.extras] +crypto = ["cryptography (>=3.4.0)"] +dev = ["sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface", "cryptography (>=3.4.0)", "pytest (>=6.0.0,<7.0.0)", "coverage[toml] (==5.0.4)", "pre-commit"] +docs = ["sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] +tests = ["pytest (>=6.0.0,<7.0.0)", "coverage[toml] (==5.0.4)"] + +[[package]] +name = "pylint" +version = "2.13.9" +description = "python code static checker" +category = "dev" +optional = false +python-versions = ">=3.6.2" + +[package.dependencies] +astroid = ">=2.11.5,<=2.12.0-dev0" +colorama = {version = "*", markers = "sys_platform == \"win32\""} +dill = ">=0.2" +isort = ">=4.2.5,<6" +mccabe = ">=0.6,<0.8" +platformdirs = ">=2.2.0" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing-extensions 
= {version = ">=3.10.0", markers = "python_version < \"3.10\""} + +[package.extras] +testutil = ["gitpython (>3)"] + +[[package]] +name = "pylint-django" +version = "2.5.3" +description = "A Pylint plugin to help Pylint understand the Django web framework" +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +pylint = ">=2.0,<3" +pylint-plugin-utils = ">=0.7" + +[package.extras] +for_tests = ["django-tables2", "factory-boy", "coverage", "pytest", "wheel", "django-tastypie", "pylint (>=2.13)"] +with_django = ["django"] + +[[package]] +name = "pylint-plugin-utils" +version = "0.7" +description = "Utilities and helpers for writing Pylint plugins" +category = "dev" +optional = false +python-versions = ">=3.6.2" + +[package.dependencies] +pylint = ">=1.7" + +[[package]] +name = "pynacl" +version = "1.5.0" +description = "Python binding to the Networking and Cryptography (NaCl) library" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +cffi = ">=1.4.1" + +[package.extras] +docs = ["sphinx (>=1.6.5)", "sphinx-rtd-theme"] +tests = ["pytest (>=3.2.1,!=3.3.0)", "hypothesis (>=3.27.0)"] + +[[package]] +name = "pynautobot" +version = "1.2.2" +description = "Nautobot API client library" +category = "main" +optional = false +python-versions = ">=3.7,<4.0" + +[package.dependencies] +requests = ">=2.20.0,<3.0.0" + +[[package]] +name = "pyparsing" +version = "3.0.9" +description = "pyparsing module - Classes and methods to define and execute parsing grammars" +category = "main" +optional = false +python-versions = ">=3.6.8" + +[package.extras] +diagrams = ["railroad-diagrams", "jinja2"] + +[[package]] +name = "pyrsistent" +version = "0.19.2" +description = "Persistent/Functional/Immutable data structures" +category = "main" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "pyserial" +version = "3.5" +description = "Python Serial Port Extension" +category = "main" +optional = false 
+python-versions = "*" + +[package.extras] +cp2110 = ["hidapi"] + +[[package]] +name = "pytest" +version = "7.2.0" +description = "pytest: simple powerful testing with Python" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +attrs = ">=19.2.0" +colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} + +[package.extras] +testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"] + +[[package]] +name = "python-crontab" +version = "2.6.0" +description = "Python Crontab API" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +python-dateutil = "*" + +[package.extras] +cron-description = ["cron-descriptor"] +cron-schedule = ["croniter"] + +[[package]] +name = "python-dateutil" +version = "2.8.2" +description = "Extensions to the standard Python datetime module" +category = "main" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "python3-openid" +version = "3.2.0" +description = "OpenID support for modern servers and consumers." 
+category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +defusedxml = "*" + +[package.extras] +mysql = ["mysql-connector-python"] +postgresql = ["psycopg2"] + +[[package]] +name = "pytz" +version = "2022.6" +description = "World timezone definitions, modern and historical" +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "pyuwsgi" +version = "2.0.21" +description = "The uWSGI server" +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "pyyaml" +version = "6.0" +description = "YAML parser and emitter for Python" +category = "main" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "pyyaml-env-tag" +version = "0.1" +description = "A custom YAML tag for referencing environment variables in YAML files. " +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +pyyaml = "*" + +[[package]] +name = "redis" +version = "4.4.0" +description = "Python client for Redis database and key-value store" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +async-timeout = ">=4.0.2" +importlib-metadata = {version = ">=1.0", markers = "python_version < \"3.8\""} +typing-extensions = {version = "*", markers = "python_version < \"3.8\""} + +[package.extras] +hiredis = ["hiredis (>=1.0.0)"] +ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==20.0.1)", "requests (>=2.26.0)"] + +[[package]] +name = "requests" +version = "2.28.1" +description = "Python HTTP for Humans." +category = "main" +optional = false +python-versions = ">=3.7, <4" + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<3" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<1.27" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use_chardet_on_py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "requests-oauthlib" +version = "1.3.1" +description = "OAuthlib authentication support for Requests." 
+category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[package.dependencies] +oauthlib = ">=3.0.0" +requests = ">=2.0.0" + +[package.extras] +rsa = ["oauthlib[signedtoken] (>=3.0.0)"] + +[[package]] +name = "rq" +version = "1.11.1" +description = "RQ is a simple, lightweight, library for creating background jobs, and processing them." +category = "main" +optional = false +python-versions = ">=3.5" + +[package.dependencies] +click = ">=5.0.0" +redis = ">=3.5.0" + +[[package]] +name = "ruamel.yaml" +version = "0.17.21" +description = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order" +category = "main" +optional = false +python-versions = ">=3" + +[package.dependencies] +"ruamel.yaml.clib" = {version = ">=0.2.6", markers = "platform_python_implementation == \"CPython\" and python_version < \"3.11\""} + +[package.extras] +docs = ["ryd"] +jinja2 = ["ruamel.yaml.jinja2 (>=0.2)"] + +[[package]] +name = "ruamel.yaml.clib" +version = "0.2.7" +description = "C version of reader, parser and emitter for ruamel.yaml derived from libyaml" +category = "main" +optional = false +python-versions = ">=3.5" + +[[package]] +name = "rx" +version = "1.6.1" +description = "Reactive Extensions (Rx) for Python" +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "scp" +version = "0.14.4" +description = "scp module for paramiko" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +paramiko = "*" + +[[package]] +name = "setuptools-scm" +version = "6.4.2" +description = "the blessed package to manage your versions by scm tags" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +packaging = ">=20.0" +tomli = ">=1.0.0" + +[package.extras] +test = ["pytest (>=6.2)", "virtualenv (>20)"] +toml = ["setuptools (>=42)"] + +[[package]] +name = "singledispatch" +version = 
"3.7.0" +description = "Backport functools.singledispatch from Python 3.4 to Python 2.6-3.3." +category = "main" +optional = false +python-versions = ">=2.6" + +[package.dependencies] +six = "*" + +[package.extras] +docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] +testing = ["pytest (>=4.6)", "pytest-flake8", "pytest-cov", "pytest-black (>=0.3.7)", "unittest2", "pytest-checkdocs (>=2.4)"] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" + +[[package]] +name = "smmap" +version = "5.0.0" +description = "A pure Python implementation of a sliding window memory map manager" +category = "main" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "snowballstemmer" +version = "2.2.0" +description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "social-auth-app-django" +version = "5.0.0" +description = "Python Social Authentication, Django integration." +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +social-auth-core = ">=4.1.0" + +[[package]] +name = "social-auth-core" +version = "4.3.0" +description = "Python social authentication made simple." 
+category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +cryptography = ">=1.4" +defusedxml = ">=0.5.0rc1" +oauthlib = ">=1.0.3" +PyJWT = ">=2.0.0" +python3-openid = ">=3.0.10" +requests = ">=2.9.1" +requests-oauthlib = ">=0.6.1" + +[package.extras] +all = ["python-jose (>=3.0.0)", "python3-saml (>=1.2.1)", "lxml (<4.7)", "cryptography (>=2.1.1)"] +allpy3 = ["python-jose (>=3.0.0)", "python3-saml (>=1.2.1)", "lxml (<4.7)", "cryptography (>=2.1.1)"] +azuread = ["cryptography (>=2.1.1)"] +openidconnect = ["python-jose (>=3.0.0)"] +saml = ["python3-saml (>=1.2.1)", "lxml (<4.7)"] + +[[package]] +name = "sqlparse" +version = "0.4.3" +description = "A non-validating SQL parser." +category = "main" +optional = false +python-versions = ">=3.5" + +[[package]] +name = "stevedore" +version = "3.5.2" +description = "Manage dynamic plugins for Python applications" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +importlib-metadata = {version = ">=1.7.0", markers = "python_version < \"3.8\""} +pbr = ">=2.0.0,<2.1.0 || >2.1.0" + +[[package]] +name = "structlog" +version = "21.5.0" +description = "Structured Logging for Python" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +typing-extensions = {version = "*", markers = "python_version < \"3.8\""} + +[package.extras] +tests = ["simplejson", "pytest (>=6.0)", "pytest-asyncio", "pretend", "freezegun (>=0.2.8)", "coverage"] +docs = ["twisted", "sphinxcontrib-mermaid", "sphinx-notfound-page", "sphinx", "furo"] +dev = ["twisted", "sphinxcontrib-mermaid", "sphinx-notfound-page", "sphinx", "furo", "simplejson", "pytest (>=6.0)", "pytest-asyncio", "pretend", "freezegun (>=0.2.8)", "coverage", "tomli", "cogapp", "rich", "pre-commit"] + +[[package]] +name = "svgwrite" +version = "1.4.3" +description = "A Python library to create SVG drawings." 
+category = "main" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "swagger-spec-validator" +version = "3.0.3" +description = "Validation of Swagger specifications" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +jsonschema = "*" +pyyaml = "*" +typing-extensions = "*" + +[[package]] +name = "tenacity" +version = "8.1.0" +description = "Retry code until it succeeds" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.extras] +doc = ["reno", "sphinx", "tornado (>=4.5)"] + +[[package]] +name = "text-unidecode" +version = "1.3" +description = "The most basic Text::Unidecode port" +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "textfsm" +version = "1.1.2" +description = "Python module for parsing semi-structured text into python tables." +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +future = "*" +six = "*" + +[[package]] +name = "toml" +version = "0.10.2" +description = "Python Library for Tom's Obvious, Minimal Language" +category = "dev" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +category = "main" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "traitlets" +version = "5.7.1" +description = "Traitlets Python configuration system" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.extras] +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] +lint = ["black (>=22.6.0)", "mdformat (>0.7)", "ruff (>=0.0.156)"] +test = ["pre-commit", "pytest"] +typing = ["mypy (>=0.990)"] + +[[package]] +name = "transitions" +version = "0.9.0" +description = "A lightweight, object-oriented Python state machine implementation with many extensions." 
+category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +six = "*" + +[package.extras] +diagrams = ["pygraphviz"] +test = ["pytest"] + +[[package]] +name = "ttp" +version = "0.9.2" +description = "Template Text Parser" +category = "main" +optional = false +python-versions = ">=2.7,<4.0" + +[package.extras] +full = ["cerberus (>=1.3.0,<1.4.0)", "jinja2 (>=3.0.0,<3.1.0)", "pyyaml (==6.0)", "deepdiff (>=5.8.0,<5.9.0)", "openpyxl (>=3.0.0,<3.1.0)", "tabulate (>=0.8.0,<0.9.0)", "ttp_templates (<1.0.0)", "yangson (>=1.4.0,<1.5.0)", "n2g (>=0.2.0,<0.3.0)"] +docs = ["readthedocs-sphinx-search (==0.1.1)", "Sphinx (==4.3.0)", "sphinx_rtd_theme (==1.0.0)", "sphinxcontrib-applehelp (==1.0.1)", "sphinxcontrib-devhelp (==1.0.1)", "sphinxcontrib-htmlhelp (==2.0.0)", "sphinxcontrib-jsmath (==1.0.1)", "sphinxcontrib-napoleon (==0.7)", "sphinxcontrib-qthelp (==1.0.2)", "sphinxcontrib-serializinghtml (==1.1.5)", "sphinxcontrib-spelling (==7.2.1)"] + +[[package]] +name = "ttp-templates" +version = "0.3.2" +description = "Template Text Parser Templates collections" +category = "main" +optional = false +python-versions = ">=3.6,<4.0" + +[package.dependencies] +ttp = ">=0.6.0" + +[package.extras] +docs = ["mkdocs (==1.2.4)", "mkdocs-material (==7.2.2)", "mkdocs-material-extensions (==1.0.1)", "mkdocstrings[python] (>=0.18.0,<0.19.0)", "pygments (==2.11)", "pymdown-extensions (==9.3)"] + +[[package]] +name = "typed-ast" +version = "1.5.4" +description = "a fork of Python 2 and 3 ast modules with type comment support" +category = "dev" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "typing-extensions" +version = "4.4.0" +description = "Backported and Experimental Type Hints for Python 3.7+" +category = "main" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "uritemplate" +version = "4.1.1" +description = "Implementation of RFC 6570 URI Templates" +category = "main" +optional = false +python-versions = ">=3.6" + 
+[[package]] +name = "urllib3" +version = "1.26.13" +description = "HTTP library with thread-safe connection pooling, file post, and more." +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" + +[package.extras] +brotli = ["brotlicffi (>=0.8.0)", "brotli (>=1.0.9)", "brotlipy (>=0.6.0)"] +secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "urllib3-secure-extra", "ipaddress"] +socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] + +[[package]] +name = "vine" +version = "5.0.0" +description = "Promises, promises, promises." +category = "main" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "watchdog" +version = "2.2.0" +description = "Filesystem events monitoring" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.extras] +watchmedo = ["PyYAML (>=3.10)"] + +[[package]] +name = "wcwidth" +version = "0.2.5" +description = "Measures the displayed width of unicode strings in a terminal" +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "wrapt" +version = "1.14.1" +description = "Module for decorators, wrappers and monkey patching." +category = "dev" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" + +[[package]] +name = "yamllint" +version = "1.28.0" +description = "A linter for YAML files." +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +pathspec = ">=0.5.3" +pyyaml = "*" + +[[package]] +name = "yamlordereddictloader" +version = "0.4.0" +description = "YAML loader and dump for PyYAML allowing to keep keys order." 
+category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +pyyaml = "*" + +[[package]] +name = "zipp" +version = "3.11.0" +description = "Backport of pathlib-compatible object wrapper for zip files" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.extras] +docs = ["sphinx (>=3.5)", "jaraco.packaging (>=9)", "rst.linker (>=1.9)", "furo", "jaraco.tidelift (>=1.4)"] +testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "flake8 (<5)", "pytest-cov", "pytest-enabler (>=1.3)", "jaraco.itertools", "func-timeout", "jaraco.functools", "more-itertools", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)", "pytest-flake8"] + +[extras] +nautobot = ["nautobot"] + +[metadata] +lock-version = "1.1" +python-versions = "^3.7" +content-hash = "a91e1997022251a0553f240d471770163237954e8485f2dbf08778fde5add887" + +[metadata.files] +absl-py = [] +amqp = [ + {file = "amqp-5.1.1-py3-none-any.whl", hash = "sha256:6f0956d2c23d8fa6e7691934d8c3930eadb44972cbbd1a7ae3a520f735d43359"}, + {file = "amqp-5.1.1.tar.gz", hash = "sha256:2c1b13fecc0893e946c65cbd5f36427861cffa4ea2201d8f6fca22e2a373b5e2"}, +] +aniso8601 = [ + {file = "aniso8601-7.0.0-py2.py3-none-any.whl", hash = "sha256:d10a4bf949f619f719b227ef5386e31f49a2b6d453004b21f02661ccc8670c7b"}, + {file = "aniso8601-7.0.0.tar.gz", hash = "sha256:513d2b6637b7853806ae79ffaca6f3e8754bdd547048f5ccc1420aec4b714f1e"}, +] +appnope = [ + {file = "appnope-0.1.3-py2.py3-none-any.whl", hash = "sha256:265a455292d0bd8a72453494fa24df5a11eb18373a60c7c0430889f22548605e"}, + {file = "appnope-0.1.3.tar.gz", hash = "sha256:02bd91c4de869fbb1e1c50aafc4098827a7a54ab2f39d9dcba6c9547ed920e24"}, +] +asgiref = [ + {file = "asgiref-3.5.2-py3-none-any.whl", hash = "sha256:1d2880b792ae8757289136f1db2b7b99100ce959b2aa57fd69dab783d05afac4"}, + {file = "asgiref-3.5.2.tar.gz", hash = "sha256:4a29362a6acebe09bf1d6640db38c1dc3d9217c68e6f9f6204d72667fc19a424"}, +] +astroid = [] +async-timeout = [ + {file = 
"async-timeout-4.0.2.tar.gz", hash = "sha256:2163e1640ddb52b7a8c80d0a67a08587e5d245cc9c553a74a847056bc2976b15"}, + {file = "async_timeout-4.0.2-py3-none-any.whl", hash = "sha256:8ca1e4fcf50d07413d66d1a5e416e42cfdf5851c981d679a09851a6853383b3c"}, +] +attrs = [] +backcall = [ + {file = "backcall-0.2.0-py2.py3-none-any.whl", hash = "sha256:fbbce6a29f263178a1f7915c1940bde0ec2b2a967566fe1c65c1dfb7422bd255"}, + {file = "backcall-0.2.0.tar.gz", hash = "sha256:5cbdbf27be5e7cfadb448baf0aa95508f91f2bbc6c6437cd9cd06e2a4c215e1e"}, +] +bandit = [ + {file = "bandit-1.7.4-py3-none-any.whl", hash = "sha256:412d3f259dab4077d0e7f0c11f50f650cc7d10db905d98f6520a95a18049658a"}, + {file = "bandit-1.7.4.tar.gz", hash = "sha256:2d63a8c573417bae338962d4b9b06fbc6080f74ecd955a092849e1e65c717bd2"}, +] +bcrypt = [] +billiard = [ + {file = "billiard-3.6.4.0-py3-none-any.whl", hash = "sha256:87103ea78fa6ab4d5c751c4909bcff74617d985de7fa8b672cf8618afd5a875b"}, + {file = "billiard-3.6.4.0.tar.gz", hash = "sha256:299de5a8da28a783d51b197d496bef4f1595dd023a93a4f59dde1886ae905547"}, +] +black = [] +cached-property = [ + {file = "cached-property-1.5.2.tar.gz", hash = "sha256:9fa5755838eecbb2d234c3aa390bd80fbd3ac6b6869109bfc1b499f7bd89a130"}, + {file = "cached_property-1.5.2-py2.py3-none-any.whl", hash = "sha256:df4f613cf7ad9a588cc381aaf4a512d26265ecebd5eb9e1ba12f1319eb85a6a0"}, +] +capirca = [] +celery = [ + {file = "celery-5.2.7-py3-none-any.whl", hash = "sha256:138420c020cd58d6707e6257b6beda91fd39af7afde5d36c6334d175302c0e14"}, + {file = "celery-5.2.7.tar.gz", hash = "sha256:fafbd82934d30f8a004f81e8f7a062e31413a23d444be8ee3326553915958c6d"}, +] +certifi = [] +cffi = [ + {file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"}, + {file = "cffi-1.15.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2"}, + {file = "cffi-1.15.1-cp27-cp27m-manylinux1_x86_64.whl", 
hash = "sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914"}, + {file = "cffi-1.15.1-cp27-cp27m-win32.whl", hash = "sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3"}, + {file = "cffi-1.15.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e"}, + {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162"}, + {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b"}, + {file = "cffi-1.15.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21"}, + {file = "cffi-1.15.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4"}, + {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01"}, + {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e"}, + {file = "cffi-1.15.1-cp310-cp310-win32.whl", hash = "sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2"}, + {file = "cffi-1.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d"}, + {file = "cffi-1.15.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac"}, + {file = "cffi-1.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c"}, + {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef"}, + {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8"}, + {file = "cffi-1.15.1-cp311-cp311-win32.whl", hash = "sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d"}, + {file = "cffi-1.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104"}, + {file = "cffi-1.15.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = 
"sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e"}, + {file = "cffi-1.15.1-cp36-cp36m-win32.whl", hash = "sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf"}, + {file = "cffi-1.15.1-cp36-cp36m-win_amd64.whl", hash = "sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497"}, + {file = "cffi-1.15.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426"}, + {file = "cffi-1.15.1-cp37-cp37m-win32.whl", hash = "sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9"}, + {file = "cffi-1.15.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045"}, + {file = "cffi-1.15.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192"}, + {file = "cffi-1.15.1-cp38-cp38-win32.whl", hash = "sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314"}, + {file = "cffi-1.15.1-cp38-cp38-win_amd64.whl", hash = "sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5"}, + {file = "cffi-1.15.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585"}, + {file = "cffi-1.15.1-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27"}, + {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76"}, + {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3"}, + {file = "cffi-1.15.1-cp39-cp39-win32.whl", hash = "sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee"}, + {file = "cffi-1.15.1-cp39-cp39-win_amd64.whl", hash = "sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c"}, + {file = "cffi-1.15.1.tar.gz", hash = "sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9"}, +] +charset-normalizer = [] +click = [ + {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"}, + {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"}, +] +click-didyoumean = [ + {file = "click-didyoumean-0.3.0.tar.gz", hash = 
"sha256:f184f0d851d96b6d29297354ed981b7dd71df7ff500d82fa6d11f0856bee8035"}, + {file = "click_didyoumean-0.3.0-py3-none-any.whl", hash = "sha256:a0713dc7a1de3f06bc0df5a9567ad19ead2d3d5689b434768a6145bff77c0667"}, +] +click-plugins = [ + {file = "click-plugins-1.1.1.tar.gz", hash = "sha256:46ab999744a9d831159c3411bb0c79346d94a444df9a3a3742e9ed63645f264b"}, + {file = "click_plugins-1.1.1-py2.py3-none-any.whl", hash = "sha256:5d262006d3222f5057fd81e1623d4443e41dcda5dc815c06b442aa3c02889fc8"}, +] +click-repl = [ + {file = "click-repl-0.2.0.tar.gz", hash = "sha256:cd12f68d745bf6151210790540b4cb064c7b13e571bc64b6957d98d120dacfd8"}, + {file = "click_repl-0.2.0-py3-none-any.whl", hash = "sha256:94b3fbbc9406a236f176e0506524b2937e4b23b6f4c0c0b2a0a83f8a64e9194b"}, +] +colorama = [] +coreapi = [ + {file = "coreapi-2.3.3-py2.py3-none-any.whl", hash = "sha256:bf39d118d6d3e171f10df9ede5666f63ad80bba9a29a8ec17726a66cf52ee6f3"}, + {file = "coreapi-2.3.3.tar.gz", hash = "sha256:46145fcc1f7017c076a2ef684969b641d18a2991051fddec9458ad3f78ffc1cb"}, +] +coreschema = [ + {file = "coreschema-0.0.4-py2-none-any.whl", hash = "sha256:5e6ef7bf38c1525d5e55a895934ab4273548629f16aed5c0a6caa74ebf45551f"}, + {file = "coreschema-0.0.4.tar.gz", hash = "sha256:9503506007d482ab0867ba14724b93c18a33b22b6d19fb419ef2d239dd4a1607"}, +] +coverage = [] +cryptography = [] +cycler = [ + {file = "cycler-0.11.0-py3-none-any.whl", hash = "sha256:3a27e95f763a428a739d2add979fa7494c912a32c17c4c38c4d5f082cad165a3"}, + {file = "cycler-0.11.0.tar.gz", hash = "sha256:9c87405839a19696e837b3b818fed3f5f69f16f1eec1a1ad77e043dcea9c772f"}, +] +decorator = [ + {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, + {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, +] +deepdiff = [ + {file = "deepdiff-5.8.1-py3-none-any.whl", hash = 
"sha256:e9aea49733f34fab9a0897038d8f26f9d94a97db1790f1b814cced89e9e0d2b7"}, + {file = "deepdiff-5.8.1.tar.gz", hash = "sha256:8d4eb2c4e6cbc80b811266419cb71dd95a157094a3947ccf937a94d44943c7b8"}, +] +defusedxml = [ + {file = "defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61"}, + {file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"}, +] +diffsync = [] +dill = [] +django = [] +django-ajax-tables = [ + {file = "django_ajax_tables-1.1.1-py3-none-any.whl", hash = "sha256:62e0138949153c0a994eefbf469f5496b1ad98bc073e170bc021a1aada7a32d0"}, + {file = "django_ajax_tables-1.1.1.tar.gz", hash = "sha256:5a7e7bc7940aa6332a564916cde22010a858a3d29fc1090ce8061010ec76337c"}, +] +django-appconf = [ + {file = "django-appconf-1.0.5.tar.gz", hash = "sha256:be3db0be6c81fa84742000b89a81c016d70ae66a7ccb620cdef592b1f1a6aaa4"}, + {file = "django_appconf-1.0.5-py3-none-any.whl", hash = "sha256:ae9f864ee1958c815a965ed63b3fba4874eec13de10236ba063a788f9a17389d"}, +] +django-cacheops = [ + {file = "django-cacheops-6.0.tar.gz", hash = "sha256:78e161ebd96a32e28e19ec7da31f2afed9e62a79726b8b5f0ed12dd16c2e5841"}, + {file = "django_cacheops-6.0-py2.py3-none-any.whl", hash = "sha256:ee38b969c9fc68f7c88e769b6c811e19563cca1ae08210d9f553ff758b6c3e17"}, +] +django-celery-beat = [ + {file = "django-celery-beat-2.2.1.tar.gz", hash = "sha256:97ae5eb309541551bdb07bf60cc57cadacf42a74287560ced2d2c06298620234"}, + {file = "django_celery_beat-2.2.1-py2.py3-none-any.whl", hash = "sha256:ab43049634fd18dc037927d7c2c7d5f67f95283a20ebbda55f42f8606412e66c"}, +] +django-constance = [] +django-cors-headers = [] +django-cryptography = [ + {file = "django-cryptography-1.0.tar.gz", hash = "sha256:13de5cf8f1250744c104b9e24774d03aa6d8488959dd40cdc016934043652445"}, + {file = "django_cryptography-1.0-py3-none-any.whl", hash = 
"sha256:0a99980b1cee7cc5e52f9b20b322620fea7cc124d770273e7bd285b20fd9d222"}, +] +django-db-file-storage = [ + {file = "django-db-file-storage-0.5.5.tar.gz", hash = "sha256:5d5da694b78ab202accab4508b958e0e37b3d146310e76f6f6125e1bdeaaad14"}, +] +django-debug-toolbar = [] +django-extensions = [] +django-filter = [ + {file = "django-filter-21.1.tar.gz", hash = "sha256:632a251fa8f1aadb4b8cceff932bb52fe2f826dd7dfe7f3eac40e5c463d6836e"}, + {file = "django_filter-21.1-py3-none-any.whl", hash = "sha256:f4a6737a30104c98d2e2a5fb93043f36dd7978e0c7ddc92f5998e85433ea5063"}, +] +django-health-check = [ + {file = "django-health-check-3.16.5.tar.gz", hash = "sha256:1edfd49293ccebbce29f9da609c407f307aee240ab799ab4201031341ae78c0f"}, + {file = "django_health_check-3.16.5-py2.py3-none-any.whl", hash = "sha256:8d66781a0ea82b1a8b44878187b38a27370e94f18287312e39be0593e72d8983"}, +] +django-jinja = [ + {file = "django-jinja-2.10.2.tar.gz", hash = "sha256:bfdfbb55c1f5a679d69ad575d550c4707d386634009152efe014089f3c4d1412"}, + {file = "django_jinja-2.10.2-py3-none-any.whl", hash = "sha256:dd003ec1c95c0989eb28a538831bced62b1b61da551cb44a5dfd708fcf75589f"}, +] +django-js-asset = [ + {file = "django_js_asset-2.0.0-py3-none-any.whl", hash = "sha256:86f9f300d682537ddaf0487dc2ab356581b8f50c069bdba91d334a46e449f923"}, + {file = "django_js_asset-2.0.0.tar.gz", hash = "sha256:adc1ee1efa853fad42054b540c02205344bb406c9bddf87c9e5377a41b7db90f"}, +] +django-mptt = [] +django-picklefield = [ + {file = "django-picklefield-3.1.tar.gz", hash = "sha256:c786cbeda78d6def2b43bff4840d19787809c8909f7ad683961703060398d356"}, + {file = "django_picklefield-3.1-py3-none-any.whl", hash = "sha256:d77c504df7311e8ec14e8b779f10ca6fec74de6c7f8e2c136e1ef60cf955125d"}, +] +django-pivot = [ + {file = "django-pivot-1.9.0.tar.gz", hash = "sha256:5e985d32d9ff2a6b89419dd0292c0fa2822d494ee479b5fd16cdb542abf66a88"}, + {file = "django_pivot-1.9.0-py3-none-any.whl", hash = 
"sha256:1c60e18e7d5f7e42856faee0961748082ddd05b01ae7c8a4baed64d2bbacd051"}, +] +django-prometheus = [ + {file = "django-prometheus-2.2.0.tar.gz", hash = "sha256:240378a1307c408bd5fc85614a3a57f1ce633d4a222c9e291e2bbf325173b801"}, + {file = "django_prometheus-2.2.0-py2.py3-none-any.whl", hash = "sha256:e6616770d8820b8834762764bf1b76ec08e1b98e72a6f359d488a2e15fe3537c"}, +] +django-redis = [ + {file = "django-redis-5.2.0.tar.gz", hash = "sha256:8a99e5582c79f894168f5865c52bd921213253b7fd64d16733ae4591564465de"}, + {file = "django_redis-5.2.0-py3-none-any.whl", hash = "sha256:1d037dc02b11ad7aa11f655d26dac3fb1af32630f61ef4428860a2e29ff92026"}, +] +django-rq = [ + {file = "django-rq-2.5.1.tar.gz", hash = "sha256:f08486602664d73a6e335872c868d79663e380247e6307496d01b8fa770fefd8"}, + {file = "django_rq-2.5.1-py2.py3-none-any.whl", hash = "sha256:7be1e10e7091555f9f36edf100b0dbb205ea2b98683d74443d2bdf3c6649a03f"}, +] +django-tables2 = [ + {file = "django-tables2-2.4.1.tar.gz", hash = "sha256:6c72dd208358539e789e4c0efd7d151e43283a4aa4093a35f44c43489e7ddeaa"}, + {file = "django_tables2-2.4.1-py2.py3-none-any.whl", hash = "sha256:50762bf3d7c61a4eb70e763c3e278650d7266bb78d0497fc8fafcf4e507c9a64"}, +] +django-taggit = [] +django-timezone-field = [ + {file = "django-timezone-field-4.1.2.tar.gz", hash = "sha256:cffac62452d060e365938aa9c9f7b72d70d8b26b9c60243bce227b35abd1b9df"}, + {file = "django_timezone_field-4.1.2-py3-none-any.whl", hash = "sha256:897c06e40b619cf5731a30d6c156886a7c64cba3a90364832148da7ef32ccf36"}, +] +django-tree-queries = [] +django-webserver = [ + {file = "django-webserver-1.2.0.tar.gz", hash = "sha256:c976979d15b5ff9a212f7904d3b779e22219aebb4857860fcaf20e4e40f1da40"}, + {file = "django_webserver-1.2.0-py2.py3-none-any.whl", hash = "sha256:09200631f266484b9e944e38e92681d6e9aa7d90d089a5c86d5fb08fddad84fe"}, +] +djangorestframework = [] +drf-spectacular = [] +drf-spectacular-sidecar = [] +drf-yasg = [] +exceptiongroup = [] +flake8 = [ + {file = 
"flake8-3.9.2-py2.py3-none-any.whl", hash = "sha256:bf8fd333346d844f616e8d47905ef3a3384edae6b4e9beb0c5101e25e3110907"}, + {file = "flake8-3.9.2.tar.gz", hash = "sha256:07528381786f2a6237b061f6e96610a4167b226cb926e2aa2b6b1d78057c576b"}, +] +fonttools = [] +funcy = [ + {file = "funcy-1.17-py2.py3-none-any.whl", hash = "sha256:ba7af5e58bfc69321aaf860a1547f18d35e145706b95d1b3c966abc4f0b60309"}, + {file = "funcy-1.17.tar.gz", hash = "sha256:40b9b9a88141ae6a174df1a95861f2b82f2fdc17669080788b73a3ed9370e968"}, +] +future = [ + {file = "future-0.18.2.tar.gz", hash = "sha256:b1bead90b70cf6ec3f0710ae53a525360fa360d306a86583adc6bf83a4db537d"}, +] +ghp-import = [ + {file = "ghp-import-2.1.0.tar.gz", hash = "sha256:9c535c4c61193c2df8871222567d7fd7e5014d835f97dc7b7439069e2413d343"}, + {file = "ghp_import-2.1.0-py3-none-any.whl", hash = "sha256:8337dd7b50877f163d4c0289bc1f1c7f127550241988d568c1db512c4324a619"}, +] +gitdb = [] +gitpython = [] +graphene = [ + {file = "graphene-2.1.9-py2.py3-none-any.whl", hash = "sha256:3d446eb1237c551052bc31155cf1a3a607053e4f58c9172b83a1b597beaa0868"}, + {file = "graphene-2.1.9.tar.gz", hash = "sha256:b9f2850e064eebfee9a3ef4a1f8aa0742848d97652173ab44c82cc8a62b9ed93"}, +] +graphene-django = [ + {file = "graphene-django-2.15.0.tar.gz", hash = "sha256:b78c9b05bc899016b9cc5bf13faa1f37fe1faa8c5407552c6ddd1a28f46fc31a"}, + {file = "graphene_django-2.15.0-py2.py3-none-any.whl", hash = "sha256:02671d195f0c09c8649acff2a8f4ad4f297d0f7d98ea6e6cdf034b81bab92880"}, +] +graphene-django-optimizer = [ + {file = "graphene-django-optimizer-0.8.0.tar.gz", hash = "sha256:79269880d59d0a35d41751ddcb419220c4ad3871960416371119f447cb2e1a77"}, +] +graphql-core = [ + {file = "graphql-core-2.3.2.tar.gz", hash = "sha256:aac46a9ac524c9855910c14c48fc5d60474def7f99fd10245e76608eba7af746"}, + {file = "graphql_core-2.3.2-py2.py3-none-any.whl", hash = "sha256:44c9bac4514e5e30c5a595fac8e3c76c1975cae14db215e8174c7fe995825bad"}, +] +graphql-relay = [ + {file = 
"graphql-relay-2.0.1.tar.gz", hash = "sha256:870b6b5304123a38a0b215a79eace021acce5a466bf40cd39fa18cb8528afabb"}, + {file = "graphql_relay-2.0.1-py3-none-any.whl", hash = "sha256:ac514cb86db9a43014d7e73511d521137ac12cf0101b2eaa5f0a3da2e10d913d"}, +] +idna = [] +importlib-metadata = [] +importlib-resources = [] +inflection = [ + {file = "inflection-0.5.1-py2.py3-none-any.whl", hash = "sha256:f38b2b640938a4f35ade69ac3d053042959b62a0f1076a5bbaa1b9526605a8a2"}, + {file = "inflection-0.5.1.tar.gz", hash = "sha256:1a29730d366e996aaacffb2f1f1cb9593dc38e2ddd30c91250c6dde09ea9b417"}, +] +iniconfig = [ + {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, + {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, +] +invoke = [] +ipaddress = [] +ipython = [ + {file = "ipython-7.34.0-py3-none-any.whl", hash = "sha256:c175d2440a1caff76116eb719d40538fbb316e214eda85c5515c303aacbfb23e"}, + {file = "ipython-7.34.0.tar.gz", hash = "sha256:af3bdb46aa292bce5615b1b2ebc76c2080c5f77f54bda2ec72461317273e7cd6"}, +] +isort = [] +itypes = [ + {file = "itypes-1.2.0-py2.py3-none-any.whl", hash = "sha256:03da6872ca89d29aef62773672b2d408f490f80db48b23079a4b194c86dd04c6"}, + {file = "itypes-1.2.0.tar.gz", hash = "sha256:af886f129dea4a2a1e3d36595a2d139589e4dd287f5cab0b40e799ee81570ff1"}, +] +jedi = [] +jinja2 = [ + {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, + {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, +] +jsonschema = [] +junos-eznc = [] +kiwisolver = [] +kombu = [ + {file = "kombu-5.2.4-py3-none-any.whl", hash = "sha256:8b213b24293d3417bcf0d2f5537b7f756079e3ea232a8386dcc89a59fd2361a4"}, + {file = "kombu-5.2.4.tar.gz", hash = "sha256:37cee3ee725f94ea8bb173eaab7c1760203ea53bbebae226328600f9d2799610"}, +] 
+lazy-object-proxy = [] +lxml = [] +markdown = [ + {file = "Markdown-3.3.7-py3-none-any.whl", hash = "sha256:f5da449a6e1c989a4cea2631aa8ee67caa5a2ef855d551c88f9e309f4634c621"}, + {file = "Markdown-3.3.7.tar.gz", hash = "sha256:cbb516f16218e643d8e0a95b309f77eb118cb138d39a4f27851e6a63581db874"}, +] +markupsafe = [ + {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-win32.whl", hash = "sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = 
"sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-win32.whl", hash = "sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601"}, + {file = 
"MarkupSafe-2.1.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-win32.whl", hash = "sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_i686.whl", hash = 
"sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-win32.whl", hash = "sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247"}, + {file = "MarkupSafe-2.1.1.tar.gz", hash = "sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b"}, +] +matplotlib = [] +matplotlib-inline = [] +mccabe = [ + {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, + {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, +] +mergedeep = [ + {file = "mergedeep-1.3.4-py3-none-any.whl", hash = "sha256:70775750742b25c0d8f36c55aed03d24c3384d17c951b3175d898bd778ef0307"}, + {file = "mergedeep-1.3.4.tar.gz", hash = "sha256:0096d52e9dad9939c3d975a774666af186eda617e6ca84df4c94dec30004f2a8"}, +] +mkdocs = [] +mock = [ + {file = "mock-4.0.3-py3-none-any.whl", hash = "sha256:122fcb64ee37cfad5b3f48d7a7d51875d7031aaf3d8be7c42e2bee25044eee62"}, + {file = "mock-4.0.3.tar.gz", hash = "sha256:7d3fbbde18228f4ff2f1f119a45cdffa458b4c0dee32eb4d2bb2f82554bac7bc"}, +] +mypy-extensions = [ + {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, + {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, +] +napalm = [] +nautobot = [] +nautobot-firewall-models = [] +nautobot-golden-config = [] +nautobot-plugin-nornir = [ + {file = "nautobot-plugin-nornir-1.0.0.tar.gz", hash = "sha256:d9301329decd7e4b9b5578c72e05c44ac84fc747002135b99538152a5ae27de3"}, + {file = 
"nautobot_plugin_nornir-1.0.0-py3-none-any.whl", hash = "sha256:14e4098bf0a4d0d5ca3c550e75c6acf88502fb774791865488a9eaac5eff402e"}, +] +nautobot-ssot = [] +ncclient = [ + {file = "ncclient-0.6.13.tar.gz", hash = "sha256:f9f8cea8bcbe057e1b948b9cd1b241eafb8a3f73c4981fbdfa1cc6ed69c0a7b3"}, +] +netaddr = [ + {file = "netaddr-0.8.0-py2.py3-none-any.whl", hash = "sha256:9666d0232c32d2656e5e5f8d735f58fd6c7457ce52fc21c98d45f2af78f990ac"}, + {file = "netaddr-0.8.0.tar.gz", hash = "sha256:d6cc57c7a07b1d9d2e917aa8b36ae8ce61c35ba3fcd1b83ca31c5a0ee2b5a243"}, +] +netmiko = [] +netutils = [] +nornir = [ + {file = "nornir-3.3.0-py3-none-any.whl", hash = "sha256:4590d96edb5044e6a9e6f84e15625d32932177a10654040f99e145d73b352479"}, + {file = "nornir-3.3.0.tar.gz", hash = "sha256:1c6fd283bcdff9972358b126703c0990e9076dff1dfdc211e3077d45ada937d5"}, +] +nornir-jinja2 = [ + {file = "nornir_jinja2-0.2.0-py3-none-any.whl", hash = "sha256:0c446bec7a8492923d4eb9ca00fb327603b41bc35d5f0112843c048737b506b1"}, + {file = "nornir_jinja2-0.2.0.tar.gz", hash = "sha256:9ee5e725fe5543dcba4ec8b976804e9e88ecd356ea3b62bad97578cea0de1f75"}, +] +nornir-napalm = [] +nornir-nautobot = [] +nornir-netmiko = [] +nornir-utils = [ + {file = "nornir_utils-0.2.0-py3-none-any.whl", hash = "sha256:b4c430793a74f03affd5ff2d90abc8c67a28c7ff325f48e3a01a9a44ec71b844"}, + {file = "nornir_utils-0.2.0.tar.gz", hash = "sha256:4de6aaa35e5c1a98e1c84db84a008b0b1e974dc65d88484f2dcea3e30c95fbc2"}, +] +ntc-templates = [] +numpy = [ + {file = "numpy-1.21.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:38e8648f9449a549a7dfe8d8755a5979b45b3538520d1e735637ef28e8c2dc50"}, + {file = "numpy-1.21.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:fd7d7409fa643a91d0a05c7554dd68aa9c9bb16e186f6ccfe40d6e003156e33a"}, + {file = "numpy-1.21.1-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a75b4498b1e93d8b700282dc8e655b8bd559c0904b3910b144646dbbbc03e062"}, + {file = 
"numpy-1.21.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1412aa0aec3e00bc23fbb8664d76552b4efde98fb71f60737c83efbac24112f1"}, + {file = "numpy-1.21.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e46ceaff65609b5399163de5893d8f2a82d3c77d5e56d976c8b5fb01faa6b671"}, + {file = "numpy-1.21.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:c6a2324085dd52f96498419ba95b5777e40b6bcbc20088fddb9e8cbb58885e8e"}, + {file = "numpy-1.21.1-cp37-cp37m-win32.whl", hash = "sha256:73101b2a1fef16602696d133db402a7e7586654682244344b8329cdcbbb82172"}, + {file = "numpy-1.21.1-cp37-cp37m-win_amd64.whl", hash = "sha256:7a708a79c9a9d26904d1cca8d383bf869edf6f8e7650d85dbc77b041e8c5a0f8"}, + {file = "numpy-1.21.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:95b995d0c413f5d0428b3f880e8fe1660ff9396dcd1f9eedbc311f37b5652e16"}, + {file = "numpy-1.21.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:635e6bd31c9fb3d475c8f44a089569070d10a9ef18ed13738b03049280281267"}, + {file = "numpy-1.21.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4a3d5fb89bfe21be2ef47c0614b9c9c707b7362386c9a3ff1feae63e0267ccb6"}, + {file = "numpy-1.21.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:8a326af80e86d0e9ce92bcc1e65c8ff88297de4fa14ee936cb2293d414c9ec63"}, + {file = "numpy-1.21.1-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:791492091744b0fe390a6ce85cc1bf5149968ac7d5f0477288f78c89b385d9af"}, + {file = "numpy-1.21.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0318c465786c1f63ac05d7c4dbcecd4d2d7e13f0959b01b534ea1e92202235c5"}, + {file = "numpy-1.21.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9a513bd9c1551894ee3d31369f9b07460ef223694098cf27d399513415855b68"}, + {file = "numpy-1.21.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:91c6f5fc58df1e0a3cc0c3a717bb3308ff850abdaa6d2d802573ee2b11f674a8"}, + {file = 
"numpy-1.21.1-cp38-cp38-win32.whl", hash = "sha256:978010b68e17150db8765355d1ccdd450f9fc916824e8c4e35ee620590e234cd"}, + {file = "numpy-1.21.1-cp38-cp38-win_amd64.whl", hash = "sha256:9749a40a5b22333467f02fe11edc98f022133ee1bfa8ab99bda5e5437b831214"}, + {file = "numpy-1.21.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d7a4aeac3b94af92a9373d6e77b37691b86411f9745190d2c351f410ab3a791f"}, + {file = "numpy-1.21.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d9e7912a56108aba9b31df688a4c4f5cb0d9d3787386b87d504762b6754fbb1b"}, + {file = "numpy-1.21.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:25b40b98ebdd272bc3020935427a4530b7d60dfbe1ab9381a39147834e985eac"}, + {file = "numpy-1.21.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:8a92c5aea763d14ba9d6475803fc7904bda7decc2a0a68153f587ad82941fec1"}, + {file = "numpy-1.21.1-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:05a0f648eb28bae4bcb204e6fd14603de2908de982e761a2fc78efe0f19e96e1"}, + {file = "numpy-1.21.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f01f28075a92eede918b965e86e8f0ba7b7797a95aa8d35e1cc8821f5fc3ad6a"}, + {file = "numpy-1.21.1-cp39-cp39-win32.whl", hash = "sha256:88c0b89ad1cc24a5efbb99ff9ab5db0f9a86e9cc50240177a571fbe9c2860ac2"}, + {file = "numpy-1.21.1-cp39-cp39-win_amd64.whl", hash = "sha256:01721eefe70544d548425a07c80be8377096a54118070b8a62476866d5208e33"}, + {file = "numpy-1.21.1-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2d4d1de6e6fb3d28781c73fbde702ac97f03d79e4ffd6598b880b2d95d62ead4"}, + {file = "numpy-1.21.1.zip", hash = "sha256:dff4af63638afcc57a3dfb9e4b26d434a7a602d225b42d746ea7fe2edf1342fd"}, +] +oauthlib = [] +ordered-set = [ + {file = "ordered-set-4.1.0.tar.gz", hash = "sha256:694a8e44c87657c59292ede72891eb91d34131f6531463aab3009191c77364a8"}, + {file = "ordered_set-4.1.0-py3-none-any.whl", hash = 
"sha256:046e1132c71fcf3330438a539928932caf51ddbc582496833e23de611de14562"}, +] +packaging = [ + {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, + {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, +] +pan-os-python = [] +pan-python = [] +paramiko = [] +parso = [ + {file = "parso-0.8.3-py2.py3-none-any.whl", hash = "sha256:c001d4636cd3aecdaf33cbb40aebb59b094be2a74c556778ef5576c175e19e75"}, + {file = "parso-0.8.3.tar.gz", hash = "sha256:8c07be290bb59f03588915921e29e8a50002acaf2cdc5fa0e0114f91709fafa0"}, +] +pathspec = [] +pbr = [] +pexpect = [ + {file = "pexpect-4.8.0-py2.py3-none-any.whl", hash = "sha256:0b48a55dcb3c05f3329815901ea4fc1537514d6ba867a152b581d69ae3710937"}, + {file = "pexpect-4.8.0.tar.gz", hash = "sha256:fc65a43959d153d0114afe13997d439c22823a27cefceb5ff35c2178c6784c0c"}, +] +pickleshare = [ + {file = "pickleshare-0.7.5-py2.py3-none-any.whl", hash = "sha256:9649af414d74d4df115d5d718f82acb59c9d418196b7b4290ed47a12ce62df56"}, + {file = "pickleshare-0.7.5.tar.gz", hash = "sha256:87683d47965c1da65cdacaf31c8441d12b8044cdec9aca500cd78fc2c683afca"}, +] +pillow = [] +platformdirs = [] +pluggy = [ + {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, + {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, +] +ply = [] +prometheus-client = [ + {file = "prometheus_client-0.14.1-py3-none-any.whl", hash = "sha256:522fded625282822a89e2773452f42df14b5a8e84a86433e3f8a189c1d54dc01"}, + {file = "prometheus_client-0.14.1.tar.gz", hash = "sha256:5459c427624961076277fdc6dc50540e2bacb98eebde99886e59ec55ed92093a"}, +] +promise = [ + {file = "promise-2.3.tar.gz", hash = "sha256:dfd18337c523ba4b6a58801c164c1904a9d4d1b1747c7d5dbf45b693a49d93d0"}, +] +prompt-toolkit = [] +psycopg2-binary = [] +ptyprocess 
= [ + {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, + {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, +] +pycodestyle = [ + {file = "pycodestyle-2.7.0-py2.py3-none-any.whl", hash = "sha256:514f76d918fcc0b55c6680472f0a37970994e07bbb80725808c17089be302068"}, + {file = "pycodestyle-2.7.0.tar.gz", hash = "sha256:c389c1d06bf7904078ca03399a4816f974a1d590090fecea0c63ec26ebaf1cef"}, +] +pycparser = [ + {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, + {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, +] +pycryptodome = [ + {file = "pycryptodome-3.13.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:e468724173df02f9d83f3fea830bf0d04aa291b5add22b4a78e01c97aab04873"}, + {file = "pycryptodome-3.13.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:1fb7a6f222072412f320b9e48d3ce981920efbfce37b06d028ec9bd94093b37f"}, + {file = "pycryptodome-3.13.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:4f1b594d0cf35bd12ec4244df1155a7f565bf6e6245976ac36174c1564688c90"}, + {file = "pycryptodome-3.13.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:9ea70f6c3f6566159e3798e4593a4a8016994a0080ac29a45200615b45091a1b"}, + {file = "pycryptodome-3.13.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:f7aad304575d075faf2806977b726b67da7ba294adc97d878f92a062e357a56a"}, + {file = "pycryptodome-3.13.0-cp27-cp27m-manylinux2014_aarch64.whl", hash = "sha256:702446a012fd9337b9327d168bb0c7dc714eb93ad361f6f61af9ca8305a301f1"}, + {file = "pycryptodome-3.13.0-cp27-cp27m-win32.whl", hash = "sha256:681ac47c538c64305d710eaed2bb49532f62b3f4c93aa7c423c520df981392e5"}, + {file = "pycryptodome-3.13.0-cp27-cp27m-win_amd64.whl", hash = 
"sha256:7b3478a187d897f003b2aa1793bcc59463e8d57a42e2aafbcbbe9cd47ec46863"}, + {file = "pycryptodome-3.13.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:eec02d9199af4b1ccfe1f9c587691a07a1fa39d949d2c1dc69d079ab9af8212f"}, + {file = "pycryptodome-3.13.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:9c8e0e6c5e982699801b20fa74f43c19aa080d2b53a39f3c132d35958e153bd4"}, + {file = "pycryptodome-3.13.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:f5457e44d3f26d9946091e92b28f3e970a56538b96c87b4b155a84e32a40b7b5"}, + {file = "pycryptodome-3.13.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:88d6d54e83cf9bbd665ce1e7b9079983ee2d97a05f42e0569ff00a70f1dd8b1e"}, + {file = "pycryptodome-3.13.0-cp27-cp27mu-manylinux2014_aarch64.whl", hash = "sha256:72de8c4d71e6b11d54528bb924447fa4fdabcbb3d76cc0e7f61d3b6075def6b3"}, + {file = "pycryptodome-3.13.0-cp35-abi3-macosx_10_9_x86_64.whl", hash = "sha256:008ef2c631f112cd5a58736e0b29f4a28b4bb853e68878689f8b476fd56e0691"}, + {file = "pycryptodome-3.13.0-cp35-abi3-manylinux1_i686.whl", hash = "sha256:51ebe9624ad0a0b4da1aaaa2d43aabadf8537737fd494cee0ffa37cd6326de02"}, + {file = "pycryptodome-3.13.0-cp35-abi3-manylinux1_x86_64.whl", hash = "sha256:deede160bdf87ddb71f0a1314ad5a267b1a960be314ea7dc6b7ad86da6da89a3"}, + {file = "pycryptodome-3.13.0-cp35-abi3-manylinux2010_i686.whl", hash = "sha256:857c16bffd938254e3a834cd6b2a755ed24e1a953b1a86e33da136d3e4c16a6f"}, + {file = "pycryptodome-3.13.0-cp35-abi3-manylinux2010_x86_64.whl", hash = "sha256:ca6db61335d07220de0b665bfee7b8e9615b2dfc67a54016db4826dac34c2dd2"}, + {file = "pycryptodome-3.13.0-cp35-abi3-manylinux2014_aarch64.whl", hash = "sha256:073dedf0f9c490ae22ca081b86357646ac9b76f3e2bd89119d137fc697a9e3b6"}, + {file = "pycryptodome-3.13.0-cp35-abi3-win32.whl", hash = "sha256:e3affa03c49cce7b0a9501cc7f608d4f8e61fb2522b276d599ac049b5955576d"}, + {file = "pycryptodome-3.13.0-cp35-abi3-win_amd64.whl", hash = 
"sha256:e5d72be02b17e6bd7919555811264403468d1d052fa67c946e402257c3c29a27"}, + {file = "pycryptodome-3.13.0-pp27-pypy_73-macosx_10_9_x86_64.whl", hash = "sha256:0896d5d15ffe584d46cb9b69a75cf14a2bc8f6daf635b7bf16c1b041342a44b1"}, + {file = "pycryptodome-3.13.0-pp27-pypy_73-manylinux1_x86_64.whl", hash = "sha256:e420cdfca73f80fe15f79bb34756959945231a052440813e5fce531e6e96331a"}, + {file = "pycryptodome-3.13.0-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:720fafdf3e5c5de93039d8308f765cc60b8e9e7e852ad7135aa65dd89238191f"}, + {file = "pycryptodome-3.13.0-pp27-pypy_73-win32.whl", hash = "sha256:7a8b0e526ff239b4f4c61dd6898e2474d609843ffc437267f3a27ddff626e6f6"}, + {file = "pycryptodome-3.13.0-pp36-pypy36_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d92a5eddffb0ad39f582f07c1de26e9daf6880e3e782a94bb7ebaf939567f8bf"}, + {file = "pycryptodome-3.13.0-pp36-pypy36_pp73-manylinux1_x86_64.whl", hash = "sha256:cb9453c981554984c6f5c5ce7682d7286e65e2173d7416114c3593a977a01bf5"}, + {file = "pycryptodome-3.13.0-pp36-pypy36_pp73-manylinux2010_x86_64.whl", hash = "sha256:765b8b16bc1fd699e183dde642c7f2653b8f3c9c1a50051139908e9683f97732"}, + {file = "pycryptodome-3.13.0-pp36-pypy36_pp73-win32.whl", hash = "sha256:b3af53dddf848afb38b3ac2bae7159ddad1feb9bac14aa3acec6ef1797b82f8d"}, + {file = "pycryptodome-3.13.0.tar.gz", hash = "sha256:95bacf9ff7d1b90bba537d3f5f6c834efe6bfbb1a0195cb3573f29e6716ef08d"}, +] +pydantic = [] +pydocstyle = [ + {file = "pydocstyle-6.1.1-py3-none-any.whl", hash = "sha256:6987826d6775056839940041beef5c08cc7e3d71d63149b48e36727f70144dc4"}, + {file = "pydocstyle-6.1.1.tar.gz", hash = "sha256:1d41b7c459ba0ee6c345f2eb9ae827cab14a7533a88c5c6f7e94923f72df92dc"}, +] +pyeapi = [ + {file = "pyeapi-0.8.4.tar.gz", hash = "sha256:c33ad1eadd8ebac75f63488df9412081ce0b024c9e1da12a37196a5c60427c54"}, +] +pyflakes = [ + {file = "pyflakes-2.3.1-py2.py3-none-any.whl", hash = "sha256:7893783d01b8a89811dd72d7dfd4d84ff098e5eed95cfa8905b22bbffe52efc3"}, + {file = 
"pyflakes-2.3.1.tar.gz", hash = "sha256:f5bc8ecabc05bb9d291eb5203d6810b49040f6ff446a756326104746cc00c1db"}, +] +pygments = [] +pyjwt = [] +pylint = [ + {file = "pylint-2.13.9-py3-none-any.whl", hash = "sha256:705c620d388035bdd9ff8b44c5bcdd235bfb49d276d488dd2c8ff1736aa42526"}, + {file = "pylint-2.13.9.tar.gz", hash = "sha256:095567c96e19e6f57b5b907e67d265ff535e588fe26b12b5ebe1fc5645b2c731"}, +] +pylint-django = [ + {file = "pylint-django-2.5.3.tar.gz", hash = "sha256:0ac090d106c62fe33782a1d01bda1610b761bb1c9bf5035ced9d5f23a13d8591"}, + {file = "pylint_django-2.5.3-py3-none-any.whl", hash = "sha256:56b12b6adf56d548412445bd35483034394a1a94901c3f8571980a13882299d5"}, +] +pylint-plugin-utils = [ + {file = "pylint-plugin-utils-0.7.tar.gz", hash = "sha256:ce48bc0516ae9415dd5c752c940dfe601b18fe0f48aa249f2386adfa95a004dd"}, + {file = "pylint_plugin_utils-0.7-py3-none-any.whl", hash = "sha256:b3d43e85ab74c4f48bb46ae4ce771e39c3a20f8b3d56982ab17aa73b4f98d535"}, +] +pynacl = [ + {file = "PyNaCl-1.5.0-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:401002a4aaa07c9414132aaed7f6836ff98f59277a234704ff66878c2ee4a0d1"}, + {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:52cb72a79269189d4e0dc537556f4740f7f0a9ec41c1322598799b0bdad4ef92"}, + {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a36d4a9dda1f19ce6e03c9a784a2921a4b726b02e1c736600ca9c22029474394"}, + {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:0c84947a22519e013607c9be43706dd42513f9e6ae5d39d3613ca1e142fba44d"}, + {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06b8f6fa7f5de8d5d2f7573fe8c863c051225a27b61e6860fd047b1775807858"}, + {file = "PyNaCl-1.5.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a422368fc821589c228f4c49438a368831cb5bbc0eab5ebe1d7fac9dded6567b"}, + {file = 
"PyNaCl-1.5.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:61f642bf2378713e2c2e1de73444a3778e5f0a38be6fee0fe532fe30060282ff"}, + {file = "PyNaCl-1.5.0-cp36-abi3-win32.whl", hash = "sha256:e46dae94e34b085175f8abb3b0aaa7da40767865ac82c928eeb9e57e1ea8a543"}, + {file = "PyNaCl-1.5.0-cp36-abi3-win_amd64.whl", hash = "sha256:20f42270d27e1b6a29f54032090b972d97f0a1b0948cc52392041ef7831fee93"}, + {file = "PyNaCl-1.5.0.tar.gz", hash = "sha256:8ac7448f09ab85811607bdd21ec2464495ac8b7c66d146bf545b0f08fb9220ba"}, +] +pynautobot = [] +pyparsing = [ + {file = "pyparsing-3.0.9-py3-none-any.whl", hash = "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"}, + {file = "pyparsing-3.0.9.tar.gz", hash = "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"}, +] +pyrsistent = [] +pyserial = [ + {file = "pyserial-3.5-py2.py3-none-any.whl", hash = "sha256:c4451db6ba391ca6ca299fb3ec7bae67a5c55dde170964c7a14ceefec02f2cf0"}, + {file = "pyserial-3.5.tar.gz", hash = "sha256:3c77e014170dfffbd816e6ffc205e9842efb10be9f58ec16d3e8675b4925cddb"}, +] +pytest = [] +python-crontab = [ + {file = "python-crontab-2.6.0.tar.gz", hash = "sha256:1e35ed7a3cdc3100545b43e196d34754e6551e7f95e4caebbe0e1c0ca41c2f1b"}, +] +python-dateutil = [ + {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, + {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, +] +python3-openid = [ + {file = "python3-openid-3.2.0.tar.gz", hash = "sha256:33fbf6928f401e0b790151ed2b5290b02545e8775f982485205a066f874aaeaf"}, + {file = "python3_openid-3.2.0-py3-none-any.whl", hash = "sha256:6626f771e0417486701e0b4daff762e7212e820ca5b29fcc0d05f6f8736dfa6b"}, +] +pytz = [] +pyuwsgi = [] +pyyaml = [ + {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"}, + {file = 
"PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"}, + {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc"}, + {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b"}, + {file = "PyYAML-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"}, + {file = "PyYAML-6.0-cp310-cp310-win32.whl", hash = "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513"}, + {file = "PyYAML-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a"}, + {file = "PyYAML-6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86"}, + {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f"}, + {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92"}, + {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4"}, + {file = "PyYAML-6.0-cp36-cp36m-win32.whl", hash = "sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293"}, + {file = "PyYAML-6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57"}, + {file = "PyYAML-6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = 
"sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c"}, + {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0"}, + {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4"}, + {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9"}, + {file = "PyYAML-6.0-cp37-cp37m-win32.whl", hash = "sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737"}, + {file = "PyYAML-6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d"}, + {file = "PyYAML-6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287"}, + {file = "PyYAML-6.0-cp38-cp38-win32.whl", hash = "sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78"}, + {file = "PyYAML-6.0-cp38-cp38-win_amd64.whl", hash = "sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07"}, + {file = "PyYAML-6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b"}, + {file = "PyYAML-6.0-cp39-cp39-macosx_11_0_arm64.whl", hash 
= "sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174"}, + {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803"}, + {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3"}, + {file = "PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0"}, + {file = "PyYAML-6.0-cp39-cp39-win32.whl", hash = "sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb"}, + {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"}, + {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"}, +] +pyyaml-env-tag = [ + {file = "pyyaml_env_tag-0.1-py3-none-any.whl", hash = "sha256:af31106dec8a4d68c60207c1886031cbf839b68aa7abccdb19868200532c2069"}, + {file = "pyyaml_env_tag-0.1.tar.gz", hash = "sha256:70092675bda14fdec33b31ba77e7543de9ddc88f2e5b99160396572d11525bdb"}, +] +redis = [] +requests = [ + {file = "requests-2.28.1-py3-none-any.whl", hash = "sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349"}, + {file = "requests-2.28.1.tar.gz", hash = "sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983"}, +] +requests-oauthlib = [ + {file = "requests-oauthlib-1.3.1.tar.gz", hash = "sha256:75beac4a47881eeb94d5ea5d6ad31ef88856affe2332b9aafb52c6452ccf0d7a"}, + {file = "requests_oauthlib-1.3.1-py2.py3-none-any.whl", hash = "sha256:2577c501a2fb8d05a304c09d090d6e47c306fef15809d102b327cf8364bddab5"}, +] +rq = [] +"ruamel.yaml" = [ + {file = "ruamel.yaml-0.17.21-py3-none-any.whl", hash = "sha256:742b35d3d665023981bd6d16b3d24248ce5df75fdb4e2924e93a05c1f8b61ca7"}, + 
{file = "ruamel.yaml-0.17.21.tar.gz", hash = "sha256:8b7ce697a2f212752a35c1ac414471dc16c424c9573be4926b56ff3f5d23b7af"}, +] +"ruamel.yaml.clib" = [] +rx = [ + {file = "Rx-1.6.1-py2.py3-none-any.whl", hash = "sha256:7357592bc7e881a95e0c2013b73326f704953301ab551fbc8133a6fadab84105"}, + {file = "Rx-1.6.1.tar.gz", hash = "sha256:13a1d8d9e252625c173dc795471e614eadfe1cf40ffc684e08b8fff0d9748c23"}, +] +scp = [ + {file = "scp-0.14.4-py2.py3-none-any.whl", hash = "sha256:29ddaafbfba60793a8a779694c97d8c150d365668a4ef67616c515b80a69ef2f"}, + {file = "scp-0.14.4.tar.gz", hash = "sha256:54699b92cb68ae34b5928c48a888eab9722a212502cba89aa795bd56597505bd"}, +] +setuptools-scm = [ + {file = "setuptools_scm-6.4.2-py3-none-any.whl", hash = "sha256:acea13255093849de7ccb11af9e1fb8bde7067783450cee9ef7a93139bddf6d4"}, + {file = "setuptools_scm-6.4.2.tar.gz", hash = "sha256:6833ac65c6ed9711a4d5d2266f8024cfa07c533a0e55f4c12f6eff280a5a9e30"}, +] +singledispatch = [ + {file = "singledispatch-3.7.0-py2.py3-none-any.whl", hash = "sha256:bc77afa97c8a22596d6d4fc20f1b7bdd2b86edc2a65a4262bdd7cc3cc19aa989"}, + {file = "singledispatch-3.7.0.tar.gz", hash = "sha256:c1a4d5c1da310c3fd8fccfb8d4e1cb7df076148fd5d858a819e37fffe44f3092"}, +] +six = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] +smmap = [ + {file = "smmap-5.0.0-py3-none-any.whl", hash = "sha256:2aba19d6a040e78d8b09de5c57e96207b09ed71d8e55ce0959eeee6c8e190d94"}, + {file = "smmap-5.0.0.tar.gz", hash = "sha256:c840e62059cd3be204b0c9c9f74be2c09d5648eddd4580d9314c3ecde0b30936"}, +] +snowballstemmer = [ + {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, + {file = "snowballstemmer-2.2.0.tar.gz", hash = 
"sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, +] +social-auth-app-django = [ + {file = "social-auth-app-django-5.0.0.tar.gz", hash = "sha256:b6e3132ce087cdd6e1707aeb1b588be41d318408fcf6395435da0bc6fe9a9795"}, + {file = "social_auth_app_django-5.0.0-py3-none-any.whl", hash = "sha256:52241a25445a010ab1c108bafff21fc5522d5c8cd0d48a92c39c7371824b065d"}, +] +social-auth-core = [ + {file = "social-auth-core-4.3.0.tar.gz", hash = "sha256:4686f0e43cf12954216875a32e944847bb1dc69e7cd9573d16a9003bb05ca477"}, + {file = "social_auth_core-4.3.0-py3-none-any.whl", hash = "sha256:1e3440d104f743b02dfe258c9d4dba5b4065abf24b2f7eb362b47054d21797df"}, +] +sqlparse = [] +stevedore = [] +structlog = [] +svgwrite = [] +swagger-spec-validator = [] +tenacity = [] +text-unidecode = [ + {file = "text-unidecode-1.3.tar.gz", hash = "sha256:bad6603bb14d279193107714b288be206cac565dfa49aa5b105294dd5c4aab93"}, + {file = "text_unidecode-1.3-py2.py3-none-any.whl", hash = "sha256:1311f10e8b895935241623731c2ba64f4c455287888b18189350b67134a822e8"}, +] +textfsm = [ + {file = "textfsm-1.1.2-py2.py3-none-any.whl", hash = "sha256:f3d4e9bd4344935a08e6844e53d6220e2e4fb7e465bee51fa909152ed6bab406"}, + {file = "textfsm-1.1.2.tar.gz", hash = "sha256:85a450b441aff04b1cac726bdb36f35534a5b196cca08c8bc14fddd879c4255c"}, +] +toml = [ + {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, + {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, +] +tomli = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] +traitlets = [] +transitions = [] +ttp = [] +ttp-templates = [] +typed-ast = [ + {file = "typed_ast-1.5.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:669dd0c4167f6f2cd9f57041e03c3c2ebf9063d0757dc89f79ba1daa2bfca9d4"}, + {file = "typed_ast-1.5.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:211260621ab1cd7324e0798d6be953d00b74e0428382991adfddb352252f1d62"}, + {file = "typed_ast-1.5.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:267e3f78697a6c00c689c03db4876dd1efdfea2f251a5ad6555e82a26847b4ac"}, + {file = "typed_ast-1.5.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c542eeda69212fa10a7ada75e668876fdec5f856cd3d06829e6aa64ad17c8dfe"}, + {file = "typed_ast-1.5.4-cp310-cp310-win_amd64.whl", hash = "sha256:a9916d2bb8865f973824fb47436fa45e1ebf2efd920f2b9f99342cb7fab93f72"}, + {file = "typed_ast-1.5.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:79b1e0869db7c830ba6a981d58711c88b6677506e648496b1f64ac7d15633aec"}, + {file = "typed_ast-1.5.4-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a94d55d142c9265f4ea46fab70977a1944ecae359ae867397757d836ea5a3f47"}, + {file = "typed_ast-1.5.4-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:183afdf0ec5b1b211724dfef3d2cad2d767cbefac291f24d69b00546c1837fb6"}, + {file = "typed_ast-1.5.4-cp36-cp36m-win_amd64.whl", hash = "sha256:639c5f0b21776605dd6c9dbe592d5228f021404dafd377e2b7ac046b0349b1a1"}, + {file = "typed_ast-1.5.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:cf4afcfac006ece570e32d6fa90ab74a17245b83dfd6655a6f68568098345ff6"}, + {file = "typed_ast-1.5.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed855bbe3eb3715fca349c80174cfcfd699c2f9de574d40527b8429acae23a66"}, + {file = "typed_ast-1.5.4-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6778e1b2f81dfc7bc58e4b259363b83d2e509a65198e85d5700dfae4c6c8ff1c"}, + {file = "typed_ast-1.5.4-cp37-cp37m-win_amd64.whl", hash = 
"sha256:0261195c2062caf107831e92a76764c81227dae162c4f75192c0d489faf751a2"}, + {file = "typed_ast-1.5.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2efae9db7a8c05ad5547d522e7dbe62c83d838d3906a3716d1478b6c1d61388d"}, + {file = "typed_ast-1.5.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7d5d014b7daa8b0bf2eaef684295acae12b036d79f54178b92a2b6a56f92278f"}, + {file = "typed_ast-1.5.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:370788a63915e82fd6f212865a596a0fefcbb7d408bbbb13dea723d971ed8bdc"}, + {file = "typed_ast-1.5.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4e964b4ff86550a7a7d56345c7864b18f403f5bd7380edf44a3c1fb4ee7ac6c6"}, + {file = "typed_ast-1.5.4-cp38-cp38-win_amd64.whl", hash = "sha256:683407d92dc953c8a7347119596f0b0e6c55eb98ebebd9b23437501b28dcbb8e"}, + {file = "typed_ast-1.5.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4879da6c9b73443f97e731b617184a596ac1235fe91f98d279a7af36c796da35"}, + {file = "typed_ast-1.5.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3e123d878ba170397916557d31c8f589951e353cc95fb7f24f6bb69adc1a8a97"}, + {file = "typed_ast-1.5.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ebd9d7f80ccf7a82ac5f88c521115cc55d84e35bf8b446fcd7836eb6b98929a3"}, + {file = "typed_ast-1.5.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98f80dee3c03455e92796b58b98ff6ca0b2a6f652120c263efdba4d6c5e58f72"}, + {file = "typed_ast-1.5.4-cp39-cp39-win_amd64.whl", hash = "sha256:0fdbcf2fef0ca421a3f5912555804296f0b0960f0418c440f5d6d3abb549f3e1"}, + {file = "typed_ast-1.5.4.tar.gz", hash = "sha256:39e21ceb7388e4bb37f4c679d72707ed46c2fbf2a5609b8b8ebc4b067d977df2"}, +] +typing-extensions = [] +uritemplate = [ + {file = "uritemplate-4.1.1-py2.py3-none-any.whl", hash = "sha256:830c08b8d99bdd312ea4ead05994a38e8936266f84b9a7878232db50b044e02e"}, + {file = 
"uritemplate-4.1.1.tar.gz", hash = "sha256:4346edfc5c3b79f694bccd6d6099a322bbeb628dbf2cd86eea55a456ce5124f0"}, +] +urllib3 = [] +vine = [ + {file = "vine-5.0.0-py2.py3-none-any.whl", hash = "sha256:4c9dceab6f76ed92105027c49c823800dd33cacce13bdedc5b914e3514b7fb30"}, + {file = "vine-5.0.0.tar.gz", hash = "sha256:7d3b1624a953da82ef63462013bbd271d3eb75751489f9807598e8f340bd637e"}, +] +watchdog = [] +wcwidth = [ + {file = "wcwidth-0.2.5-py2.py3-none-any.whl", hash = "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784"}, + {file = "wcwidth-0.2.5.tar.gz", hash = "sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83"}, +] +wrapt = [ + {file = "wrapt-1.14.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:1b376b3f4896e7930f1f772ac4b064ac12598d1c38d04907e696cc4d794b43d3"}, + {file = "wrapt-1.14.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:903500616422a40a98a5a3c4ff4ed9d0066f3b4c951fa286018ecdf0750194ef"}, + {file = "wrapt-1.14.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:5a9a0d155deafd9448baff28c08e150d9b24ff010e899311ddd63c45c2445e28"}, + {file = "wrapt-1.14.1-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:ddaea91abf8b0d13443f6dac52e89051a5063c7d014710dcb4d4abb2ff811a59"}, + {file = "wrapt-1.14.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:36f582d0c6bc99d5f39cd3ac2a9062e57f3cf606ade29a0a0d6b323462f4dd87"}, + {file = "wrapt-1.14.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:7ef58fb89674095bfc57c4069e95d7a31cfdc0939e2a579882ac7d55aadfd2a1"}, + {file = "wrapt-1.14.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:e2f83e18fe2f4c9e7db597e988f72712c0c3676d337d8b101f6758107c42425b"}, + {file = "wrapt-1.14.1-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:ee2b1b1769f6707a8a445162ea16dddf74285c3964f605877a20e38545c3c462"}, + {file = "wrapt-1.14.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:833b58d5d0b7e5b9832869f039203389ac7cbf01765639c7309fd50ef619e0b1"}, + {file = 
"wrapt-1.14.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:80bb5c256f1415f747011dc3604b59bc1f91c6e7150bd7db03b19170ee06b320"}, + {file = "wrapt-1.14.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:07f7a7d0f388028b2df1d916e94bbb40624c59b48ecc6cbc232546706fac74c2"}, + {file = "wrapt-1.14.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:02b41b633c6261feff8ddd8d11c711df6842aba629fdd3da10249a53211a72c4"}, + {file = "wrapt-1.14.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2fe803deacd09a233e4762a1adcea5db5d31e6be577a43352936179d14d90069"}, + {file = "wrapt-1.14.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:257fd78c513e0fb5cdbe058c27a0624c9884e735bbd131935fd49e9fe719d310"}, + {file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4fcc4649dc762cddacd193e6b55bc02edca674067f5f98166d7713b193932b7f"}, + {file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:11871514607b15cfeb87c547a49bca19fde402f32e2b1c24a632506c0a756656"}, + {file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8ad85f7f4e20964db4daadcab70b47ab05c7c1cf2a7c1e51087bfaa83831854c"}, + {file = "wrapt-1.14.1-cp310-cp310-win32.whl", hash = "sha256:a9a52172be0b5aae932bef82a79ec0a0ce87288c7d132946d645eba03f0ad8a8"}, + {file = "wrapt-1.14.1-cp310-cp310-win_amd64.whl", hash = "sha256:6d323e1554b3d22cfc03cd3243b5bb815a51f5249fdcbb86fda4bf62bab9e164"}, + {file = "wrapt-1.14.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:43ca3bbbe97af00f49efb06e352eae40434ca9d915906f77def219b88e85d907"}, + {file = "wrapt-1.14.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:6b1a564e6cb69922c7fe3a678b9f9a3c54e72b469875aa8018f18b4d1dd1adf3"}, + {file = "wrapt-1.14.1-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:00b6d4ea20a906c0ca56d84f93065b398ab74b927a7a3dbd470f6fc503f95dc3"}, + {file = 
"wrapt-1.14.1-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:a85d2b46be66a71bedde836d9e41859879cc54a2a04fad1191eb50c2066f6e9d"}, + {file = "wrapt-1.14.1-cp35-cp35m-win32.whl", hash = "sha256:dbcda74c67263139358f4d188ae5faae95c30929281bc6866d00573783c422b7"}, + {file = "wrapt-1.14.1-cp35-cp35m-win_amd64.whl", hash = "sha256:b21bb4c09ffabfa0e85e3a6b623e19b80e7acd709b9f91452b8297ace2a8ab00"}, + {file = "wrapt-1.14.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:9e0fd32e0148dd5dea6af5fee42beb949098564cc23211a88d799e434255a1f4"}, + {file = "wrapt-1.14.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9736af4641846491aedb3c3f56b9bc5568d92b0692303b5a305301a95dfd38b1"}, + {file = "wrapt-1.14.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b02d65b9ccf0ef6c34cba6cf5bf2aab1bb2f49c6090bafeecc9cd81ad4ea1c1"}, + {file = "wrapt-1.14.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21ac0156c4b089b330b7666db40feee30a5d52634cc4560e1905d6529a3897ff"}, + {file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:9f3e6f9e05148ff90002b884fbc2a86bd303ae847e472f44ecc06c2cd2fcdb2d"}, + {file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:6e743de5e9c3d1b7185870f480587b75b1cb604832e380d64f9504a0535912d1"}, + {file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:d79d7d5dc8a32b7093e81e97dad755127ff77bcc899e845f41bf71747af0c569"}, + {file = "wrapt-1.14.1-cp36-cp36m-win32.whl", hash = "sha256:81b19725065dcb43df02b37e03278c011a09e49757287dca60c5aecdd5a0b8ed"}, + {file = "wrapt-1.14.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b014c23646a467558be7da3d6b9fa409b2c567d2110599b7cf9a0c5992b3b471"}, + {file = "wrapt-1.14.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:88bd7b6bd70a5b6803c1abf6bca012f7ed963e58c68d76ee20b9d751c74a3248"}, + {file = 
"wrapt-1.14.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5901a312f4d14c59918c221323068fad0540e34324925c8475263841dbdfe68"}, + {file = "wrapt-1.14.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d77c85fedff92cf788face9bfa3ebaa364448ebb1d765302e9af11bf449ca36d"}, + {file = "wrapt-1.14.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d649d616e5c6a678b26d15ece345354f7c2286acd6db868e65fcc5ff7c24a77"}, + {file = "wrapt-1.14.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7d2872609603cb35ca513d7404a94d6d608fc13211563571117046c9d2bcc3d7"}, + {file = "wrapt-1.14.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:ee6acae74a2b91865910eef5e7de37dc6895ad96fa23603d1d27ea69df545015"}, + {file = "wrapt-1.14.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2b39d38039a1fdad98c87279b48bc5dce2c0ca0d73483b12cb72aa9609278e8a"}, + {file = "wrapt-1.14.1-cp37-cp37m-win32.whl", hash = "sha256:60db23fa423575eeb65ea430cee741acb7c26a1365d103f7b0f6ec412b893853"}, + {file = "wrapt-1.14.1-cp37-cp37m-win_amd64.whl", hash = "sha256:709fe01086a55cf79d20f741f39325018f4df051ef39fe921b1ebe780a66184c"}, + {file = "wrapt-1.14.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8c0ce1e99116d5ab21355d8ebe53d9460366704ea38ae4d9f6933188f327b456"}, + {file = "wrapt-1.14.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e3fb1677c720409d5f671e39bac6c9e0e422584e5f518bfd50aa4cbbea02433f"}, + {file = "wrapt-1.14.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:642c2e7a804fcf18c222e1060df25fc210b9c58db7c91416fb055897fc27e8cc"}, + {file = "wrapt-1.14.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b7c050ae976e286906dd3f26009e117eb000fb2cf3533398c5ad9ccc86867b1"}, + {file = 
"wrapt-1.14.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef3f72c9666bba2bab70d2a8b79f2c6d2c1a42a7f7e2b0ec83bb2f9e383950af"}, + {file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:01c205616a89d09827986bc4e859bcabd64f5a0662a7fe95e0d359424e0e071b"}, + {file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5a0f54ce2c092aaf439813735584b9537cad479575a09892b8352fea5e988dc0"}, + {file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2cf71233a0ed05ccdabe209c606fe0bac7379fdcf687f39b944420d2a09fdb57"}, + {file = "wrapt-1.14.1-cp38-cp38-win32.whl", hash = "sha256:aa31fdcc33fef9eb2552cbcbfee7773d5a6792c137b359e82879c101e98584c5"}, + {file = "wrapt-1.14.1-cp38-cp38-win_amd64.whl", hash = "sha256:d1967f46ea8f2db647c786e78d8cc7e4313dbd1b0aca360592d8027b8508e24d"}, + {file = "wrapt-1.14.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3232822c7d98d23895ccc443bbdf57c7412c5a65996c30442ebe6ed3df335383"}, + {file = "wrapt-1.14.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:988635d122aaf2bdcef9e795435662bcd65b02f4f4c1ae37fbee7401c440b3a7"}, + {file = "wrapt-1.14.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cca3c2cdadb362116235fdbd411735de4328c61425b0aa9f872fd76d02c4e86"}, + {file = "wrapt-1.14.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d52a25136894c63de15a35bc0bdc5adb4b0e173b9c0d07a2be9d3ca64a332735"}, + {file = "wrapt-1.14.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40e7bc81c9e2b2734ea4bc1aceb8a8f0ceaac7c5299bc5d69e37c44d9081d43b"}, + {file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b9b7a708dd92306328117d8c4b62e2194d00c365f18eff11a9b53c6f923b01e3"}, + {file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_i686.whl", hash = 
"sha256:6a9a25751acb379b466ff6be78a315e2b439d4c94c1e99cb7266d40a537995d3"}, + {file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:34aa51c45f28ba7f12accd624225e2b1e5a3a45206aa191f6f9aac931d9d56fe"}, + {file = "wrapt-1.14.1-cp39-cp39-win32.whl", hash = "sha256:dee0ce50c6a2dd9056c20db781e9c1cfd33e77d2d569f5d1d9321c641bb903d5"}, + {file = "wrapt-1.14.1-cp39-cp39-win_amd64.whl", hash = "sha256:dee60e1de1898bde3b238f18340eec6148986da0455d8ba7848d50470a7a32fb"}, + {file = "wrapt-1.14.1.tar.gz", hash = "sha256:380a85cf89e0e69b7cfbe2ea9f765f004ff419f34194018a6827ac0e3edfed4d"}, +] +yamllint = [] +yamlordereddictloader = [ + {file = "yamlordereddictloader-0.4.0.tar.gz", hash = "sha256:7f30f0b99ea3f877f7cb340c570921fa9d639b7f69cba18be051e27f8de2080e"}, +] +zipp = [] diff --git a/webinars/panorama-ssot/pyproject.toml b/webinars/panorama-ssot/pyproject.toml new file mode 100644 index 0000000..370ff4c --- /dev/null +++ b/webinars/panorama-ssot/pyproject.toml @@ -0,0 +1,118 @@ +[tool.poetry] +name = "nautobot-ssot-panorama" +version = "0.1.0" +description = "SSoT sync capabilities with Nautobot Firewall Models Plugin and Panorama" +authors = ["Network to Code, LLC "] +license = "Apache-2.0" +readme = "README.md" +homepage = "https://github.com/networktocode/nautobot-plugin-ssot-panorama" +repository = "https://github.com/networktocode/nautobot-plugin-ssot-panorama" +keywords = ["nautobot", "nautobot-plugin"] +include = [ + "LICENSE", + "README.md", +] +packages = [ + { include = "nautobot_ssot_panorama" }, +] + +[tool.poetry.dependencies] +python = "^3.7" +# Required for Python 3.7 for now. 
See: https://stackoverflow.com/a/73932581/194311 +importlib-metadata = "4.13.0" +# Used for local development +nautobot = { version = "^1.4.0", optional = true } +nautobot-ssot = "^1.1.0" +nautobot-firewall-models = "1.2.0a2" +pan-os-python = "^1.7.3" +nautobot-golden-config = { git = "https://github.com/whitej6/nautobot-plugin-golden-config.git", branch = "jlw-json-fix" } + +[tool.poetry.dev-dependencies] +bandit = "*" +black = "*" +coverage = "*" +django-debug-toolbar = "*" +# we need to pin flake8 because of package dependencies that cause it to downgrade and +# therefore cause issues with linting since older versions do not take .flake8 as config +flake8 = "^3.9.2" +invoke = "*" +ipython = "*" +mkdocs = "*" +pydocstyle = "*" +pylint = "*" +pylint-django = "*" +pytest = "*" +yamllint = "*" +Markdown = "*" +toml = "*" + +[tool.poetry.extras] +nautobot = ["nautobot"] + +[tool.black] +line-length = 120 +target-version = ['py37'] +include = '\.pyi?$' +exclude = ''' +( + /( + \.eggs # exclude a few common directories in the + | \.git # root of the project + | \.hg + | \.mypy_cache + | \.tox + | \.venv + | _build + | buck-out + | build + | dist + )/ + | settings.py # This is where you define files that should not be stylized by black + # the root of the project +) +''' + +[tool.pylint.master] +# Include the pylint_django plugin to avoid spurious warnings about Django patterns +load-plugins="pylint_django" +ignore=".venv" + +[tool.pylint.basic] +# No docstrings required for private methods (Pylint default), or for test_ functions, or for inner Meta classes. +no-docstring-rgx="^(_|test_|Meta$)" + +[tool.pylint.messages_control] +# Line length is enforced by Black, so pylint doesn't need to check it. +# Pylint and Black disagree about how to format multi-line arrays; Black wins. 
"""Tasks for use with Invoke.

Copyright (c) 2022, Network to Code, LLC
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
    http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""

import os

from invoke import Collection, task as invoke_task

# Accepted spellings for boolean-like strings. These mirror the exact value
# sets of distutils.util.strtobool, which was deprecated by PEP 632 and
# removed entirely in Python 3.12, so we no longer depend on distutils.
_TRUTHY_VALUES = frozenset({"y", "yes", "t", "true", "on", "1"})
_FALSY_VALUES = frozenset({"n", "no", "f", "false", "off", "0"})


def is_truthy(arg):
    """Convert "truthy" strings into Booleans.

    Examples:
        >>> is_truthy('yes')
        True

    Args:
        arg (str | bool): Truthy string (True values are y, yes, t, true, on and 1;
            false values are n, no, f, false, off and 0). Booleans are returned unchanged.

    Returns:
        bool: The boolean equivalent of ``arg``.

    Raises:
        ValueError: If ``arg`` is a string matching neither the truthy nor the falsy set.
    """
    if isinstance(arg, bool):
        return arg
    # Case-insensitive match, same as the old distutils.util.strtobool helper.
    value = arg.lower()
    if value in _TRUTHY_VALUES:
        return True
    if value in _FALSY_VALUES:
        return False
    raise ValueError(f"invalid truth value {arg!r}")


# Use pyinvoke configuration for default values, see http://docs.pyinvoke.org/en/stable/concepts/configuration.html
# Variables may be overwritten in invoke.yml or by the environment variables INVOKE_NAUTOBOT_SSOT_PANORAMA_xxx
namespace = Collection("nautobot_ssot_panorama")
namespace.configure(
    {
        "nautobot_ssot_panorama": {
            "nautobot_ver": "latest",
            "project_name": "nautobot_ssot_panorama",
            "python_ver": "3.7",
            "local": False,
            "compose_dir": os.path.join(os.path.dirname(__file__), "development"),
            "compose_files": [
                "docker-compose.base.yml",
                "docker-compose.redis.yml",
                "docker-compose.postgres.yml",
                "docker-compose.dev.yml",
            ],
            "compose_http_timeout": "86400",
        }
    }
)


def task(function=None, *args, **kwargs):
    """Task decorator to override the default Invoke task decorator and add each task to the invoke namespace.

    Supports both the bare ``@task`` form and the parameterized ``@task(...)`` form.
    """

    def task_wrapper(function=None):
        """Wrapper around invoke.task to add the task to the namespace as well."""
        if args or kwargs:
            task_func = invoke_task(*args, **kwargs)(function)
        else:
            task_func = invoke_task(function)
        namespace.add_task(task_func)
        return task_func

    if function:
        # The decorator was called with no arguments
        return task_wrapper(function)
    # The decorator was called with arguments
    return task_wrapper


def docker_compose(context, command, **kwargs):
    """Helper function for running a specific docker-compose command with all appropriate parameters and environment.

    Args:
        context (obj): Used to run specific commands
        command (str): Command string to append to the "docker-compose ..." command, such as "build", "up", etc.
        **kwargs: Passed through to the context.run() call.

    Returns:
        The result of context.run() for the assembled docker-compose command.
    """
    build_env = {
        # Note: 'docker-compose logs' will stop following after 60 seconds by default,
        # so we are overriding that by setting this environment variable.
        "COMPOSE_HTTP_TIMEOUT": context.nautobot_ssot_panorama.compose_http_timeout,
        "NAUTOBOT_VER": context.nautobot_ssot_panorama.nautobot_ver,
        "PYTHON_VER": context.nautobot_ssot_panorama.python_ver,
    }
    compose_command = f'docker-compose --project-name {context.nautobot_ssot_panorama.project_name} --project-directory "{context.nautobot_ssot_panorama.compose_dir}"'
    # Each configured compose file is passed explicitly with -f, in order.
    for compose_file in context.nautobot_ssot_panorama.compose_files:
        compose_file_path = os.path.join(context.nautobot_ssot_panorama.compose_dir, compose_file)
        compose_command += f' -f "{compose_file_path}"'
    compose_command += f" {command}"
    print(f'Running docker-compose command "{command}"')
    return context.run(compose_command, env=build_env, **kwargs)


def run_command(context, command, **kwargs):
    """Wrapper to run a command locally or inside the nautobot container.

    Args:
        context (obj): Used to run specific commands
        command (str): The shell command to execute.
        **kwargs: Passed through to context.run() / docker_compose().
    """
    if is_truthy(context.nautobot_ssot_panorama.local):
        context.run(command, **kwargs)
    else:
        # Check if nautobot is running, no need to start another nautobot container to run a command
        docker_compose_status = "ps --services --filter status=running"
        results = docker_compose(context, docker_compose_status, hide="out")
        if "nautobot" in results.stdout:
            compose_command = f"exec nautobot {command}"
        else:
            compose_command = f"run --entrypoint '{command}' nautobot"

        docker_compose(context, compose_command, pty=True)


# ------------------------------------------------------------------------------
# BUILD
# ------------------------------------------------------------------------------
@task(
    help={
        "force_rm": "Always remove intermediate containers",
        "cache": "Whether to use Docker's cache when building the image (defaults to enabled)",
    }
)
def build(context, force_rm=False, cache=True):
    """Build Nautobot docker image."""
    command = "build"

    if not cache:
        command += " --no-cache"
    if force_rm:
        command += " --force-rm"

    print(f"Building Nautobot with Python {context.nautobot_ssot_panorama.python_ver}...")
    docker_compose(context, command)


@task
def generate_packages(context):
    """Generate all Python packages inside docker and copy the file locally under dist/."""
    command = "poetry build"
    run_command(context, command)


# ------------------------------------------------------------------------------
# START / STOP / DEBUG
# ------------------------------------------------------------------------------
@task
def debug(context):
    """Start Nautobot and its dependencies in debug mode."""
    print("Starting Nautobot in debug mode...")
    docker_compose(context, "up")


@task
def start(context):
    """Start Nautobot and its dependencies in detached mode."""
    print("Starting Nautobot in detached mode...")
    docker_compose(context, "up --detach")


@task
def restart(context):
    """Gracefully restart all containers."""
    print("Restarting Nautobot...")
    docker_compose(context, "restart")


@task
def stop(context):
    """Stop Nautobot and its dependencies."""
    print("Stopping Nautobot...")
    docker_compose(context, "down")


@task
def destroy(context):
    """Destroy all containers and volumes."""
    print("Destroying Nautobot...")
    docker_compose(context, "down --volumes")


@task
def vscode(context):
    """Launch Visual Studio Code with the appropriate Environment variables to run in a container."""
    command = "code nautobot.code-workspace"

    context.run(command)


@task(
    help={
        "service": "Docker-compose service name to view (default: nautobot)",
        "follow": "Follow logs",
        "tail": "Tail N number of lines or 'all'",
    }
)
def logs(context, service="nautobot", follow=False, tail=None):
    """View the logs of a docker-compose service."""
    command = "logs "

    if follow:
        command += "--follow "
    if tail:
        command += f"--tail={tail} "

    command += service
    docker_compose(context, command)


# ------------------------------------------------------------------------------
# ACTIONS
# ------------------------------------------------------------------------------
@task
def nbshell(context):
    """Launch an interactive nbshell session."""
    run_command(context, "nautobot-server nbshell")


@task
def shell_plus(context):
    """Launch an interactive shell_plus session."""
    run_command(context, "nautobot-server shell_plus")


@task
def cli(context):
    """Launch a bash shell inside the running Nautobot container."""
    run_command(context, "bash")


@task(
    help={
        "user": "name of the superuser to create (default: admin)",
    }
)
def createsuperuser(context, user="admin"):
    """Create a new Nautobot superuser account (default: "admin"), will prompt for password."""
    run_command(context, f"nautobot-server createsuperuser --username {user}")


@task(
    help={
        "name": "name of the migration to be created; if unspecified, will autogenerate a name",
    }
)
def makemigrations(context, name=""):
    """Perform makemigrations operation in Django."""
    base_command = "nautobot-server makemigrations nautobot_ssot_panorama"
    # Only pass --name when the caller supplied one; Django autogenerates otherwise.
    run_command(context, f"{base_command} --name {name}" if name else base_command)


@task
def migrate(context):
    """Perform migrate operation in Django."""
    run_command(context, "nautobot-server migrate")


@task(help={})
def post_upgrade(context):
    """
    Performs Nautobot common post-upgrade operations using a single entrypoint.

    This will run the following management commands with default settings, in order:

    - migrate
    - trace_paths
    - collectstatic
    - remove_stale_contenttypes
    - clearsessions
    - invalidate all
    """
    run_command(context, "nautobot-server post_upgrade")


# ------------------------------------------------------------------------------
# TESTS
# ------------------------------------------------------------------------------
@task(
    help={
        "autoformat": "Apply formatting recommendations automatically, rather than failing if formatting is incorrect.",
    }
)
def black(context, autoformat=False):
    """Check Python code style with Black."""
    # In autoformat mode Black rewrites files; otherwise it only reports diffs.
    mode = "black" if autoformat else "black --check --diff"
    run_command(context, f"{mode} .")


@task
def flake8(context):
    """Check for PEP8 compliance and other style issues."""
    run_command(context, "flake8 . --config .flake8")


@task
def hadolint(context):
    """Check Dockerfile for hadolint compliance and other style issues."""
    run_command(context, "hadolint development/Dockerfile")


@task
def pylint(context):
    """Run pylint code analysis."""
    run_command(
        context,
        'pylint --init-hook "import nautobot; nautobot.setup()" --rcfile pyproject.toml nautobot_ssot_panorama',
    )


@task
def pydocstyle(context):
    """Run pydocstyle to validate docstring formatting adheres to NTC defined standards."""
    # We exclude the /migrations/ directory since it is autogenerated code
    run_command(context, "pydocstyle .")


@task
def bandit(context):
    """Run bandit to validate basic static code security analysis."""
    run_command(context, "bandit --recursive . --configfile .bandit.yml")


@task
def yamllint(context):
    """Run yamllint to validate formatting adheres to NTC defined YAML standards.

    Args:
        context (obj): Used to run specific commands
    """
    run_command(context, "yamllint . --format standard")


@task
def check_migrations(context):
    """Check for missing migrations."""
    run_command(
        context,
        "nautobot-server --config=nautobot/core/tests/nautobot_config.py makemigrations --dry-run --check",
    )


@task(
    help={
        "keepdb": "save and re-use test database between test runs for faster re-testing.",
        "label": "specify a directory or module to test instead of running all Nautobot tests",
        "failfast": "fail as soon as a single test fails don't run the entire test suite",
        "buffer": "Discard output from passing tests",
    }
)
def unittest(context, keepdb=False, label="nautobot_ssot_panorama", failfast=False, buffer=True):
    """Run Nautobot unit tests."""
    command = f"coverage run --module nautobot.core.cli test {label}"
    # Append each optional flag in the same fixed order as before.
    for enabled, flag in ((keepdb, " --keepdb"), (failfast, " --failfast"), (buffer, " --buffer")):
        if enabled:
            command += flag
    run_command(context, command)


@task
def unittest_coverage(context):
    """Report on code test coverage as measured by 'invoke unittest'."""
    run_command(context, "coverage report --skip-covered --include 'nautobot_ssot_panorama/*' --omit *migrations*")


@task(
    help={
        "failfast": "fail as soon as a single test fails don't run the entire test suite",
    }
)
def tests(context, failfast=False):
    """Run all tests for this plugin."""
    # If we are not running locally, start the docker containers so we don't have to for each test
    if not is_truthy(context.nautobot_ssot_panorama.local):
        print("Starting Docker Containers...")
        start(context)
    # Linters are ordered loosely from fastest to slowest.
    for label, linter in (
        ("black", black),
        ("flake8", flake8),
        ("bandit", bandit),
        ("pydocstyle", pydocstyle),
        ("yamllint", yamllint),
        ("pylint", pylint),
    ):
        print(f"Running {label}...")
        linter(context)
    print("Running unit tests...")
    unittest(context, failfast=failfast)
    print("All tests have passed!")
    unittest_coverage(context)