From 2ed155d77fb56bb40e789660863e241ee63d450f Mon Sep 17 00:00:00 2001 From: scetron Date: Thu, 4 Jan 2024 14:39:31 -0500 Subject: [PATCH 001/225] add more api documentation --- docs/user/app_use_cases.md | 34 ++++++++++++++++++++++++++++++++-- 1 file changed, 32 insertions(+), 2 deletions(-) diff --git a/docs/user/app_use_cases.md b/docs/user/app_use_cases.md index 7fb4499a..898d726b 100644 --- a/docs/user/app_use_cases.md +++ b/docs/user/app_use_cases.md @@ -88,7 +88,10 @@ A Platform that will work with Arista EOS devices must have specific values for A new device can be onboarded via : - A job execution. -- An API, via a `POST` to `/api/extras/jobs/{id}/run` +- An API, via a `POST` to `/api/extras/jobs/Perform%20Device%20Onboarding/run` or `/api/extras/jobs/{id}/run` + +!!! note + The Device Onboarding Job's ID (UUID) will be different per deployment. During a successful onboarding process, a new device will be created in Nautobot with its management interface and its primary IP assigned. The management interface will be discovered on the device based on the IP address provided. @@ -112,6 +115,33 @@ When onboarding an Arista EOS device, there are a few requirements: ### Consult the Status of Onboarding Tasks -The status of the onboarding process for each device is maintained is a dedicated table in Nautobot and can be retrieved: +The status of onboarding jobs can be viewed via the corresponding Job-Results under the Jobs page in Nautobot. - Via the UI via Job-Results - Via the API via Job-Results + +# API + +!!! note + In V3.0, with the move of the app to a job, the dedicated API views have been removed. This also removes API documentation from the built in Swagger API documentation. 
+
+To run an onboarding task Job via the API:
+
+
+Post to `/api/extras/jobs/Perform%20Device%20Onboarding/run/` with the relevant onboarding data:
+
+```bash
+curl -X "POST" /api/extras/jobs/Perform%20Device%20Onboarding/run/ -H "Content-Type: application/json" -H "Authorization: Token $NAUTOBOT_TOKEN" -d '{"data": {"location": "", "ip_address": "", "port": 22, "timeout": 30}}'
+```
+
+Required Fields:
+    location: Location UUID
+    ip_address: String of IP or CSV of IPs
+    port: Integer
+    timeout: Integer
+
+Optional Fields:
+    credentials: Secret Group UUID
+    platform: Platform UUID
+    role: Role UUID
+    device_type: Device Type UUID
+    continue_on_failure: Boolean

From 21aa52fb85d992bd3f8c92366d7aa74c7730a4f2 Mon Sep 17 00:00:00 2001
From: Stephen Corry
Date: Sat, 6 Jan 2024 10:43:58 -0500
Subject: [PATCH 002/225] Apply suggestions from code review

Co-authored-by: Joe Wesch <10467633+joewesch@users.noreply.github.com>
---
 docs/user/app_use_cases.md | 8 +++-----
 1 file changed, 3 insertions(+), 5 deletions(-)

diff --git a/docs/user/app_use_cases.md b/docs/user/app_use_cases.md
index 898d726b..a918282d 100644
--- a/docs/user/app_use_cases.md
+++ b/docs/user/app_use_cases.md
@@ -88,10 +88,10 @@ A Platform that will work with Arista EOS devices must have specific values for

 A new device can be onboarded via :

 - A job execution.
-- An API, via a `POST` to `/api/extras/jobs/Perform%20Device%20Onboarding/run` or `/api/extras/jobs/{id}/run`
+- API, via a `POST` to `/api/extras/jobs/Perform%20Device%20Onboarding/run` or `/api/extras/jobs/{id}/run`

 !!! note
-    The Device Onboarding Job's ID (UUID) will be different per deployment.
+    The Device Onboarding Job's ID (UUID) will be different per Nautobot instance.

 During a successful onboarding process, a new device will be created in Nautobot with its management interface and its primary IP assigned. The management interface will be discovered on the device based on the IP address provided.
@@ -115,9 +115,7 @@ When onboarding an Arista EOS device, there are a few requirements: ### Consult the Status of Onboarding Tasks -The status of onboarding jobs can be viewed via the corresponding Job-Results under the Jobs page in Nautobot. -- Via the UI via Job-Results -- Via the API via Job-Results +The status of onboarding jobs can be viewed via the UI (Jobs > Job Results) or retrieved via API (`/api/extras/job-results/`) with each process corresponding to an individual Job-Result object. # API From 79c2dfed858fa6bdf97fbd4535264ffe6e65a05a Mon Sep 17 00:00:00 2001 From: David Cates Date: Tue, 16 Jan 2024 22:45:16 -0700 Subject: [PATCH 003/225] update ssot integration --- development/development.env | 2 + development/nautobot_config.py | 18 +-- nautobot_device_onboarding/__init__.py | 2 +- .../diffsync/__init__.py | 1 + .../diffsync/adapters/__init__.py | 1 + .../adapters/network_importer_adapters.py | 12 ++ .../diffsync/adapters/onboarding_adapters.py | 36 ++++++ .../models/network_importer_models.py | 1 + .../diffsync/models/onboarding_models.py | 20 +++ nautobot_device_onboarding/jobs.py | 115 +++++++++++++++++- pyproject.toml | 4 +- tasks.py | 2 +- 12 files changed, 201 insertions(+), 13 deletions(-) create mode 100644 nautobot_device_onboarding/diffsync/__init__.py create mode 100644 nautobot_device_onboarding/diffsync/adapters/__init__.py create mode 100644 nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py create mode 100644 nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py create mode 100644 nautobot_device_onboarding/diffsync/models/network_importer_models.py create mode 100644 nautobot_device_onboarding/diffsync/models/onboarding_models.py diff --git a/development/development.env b/development/development.env index 54f0b870..11d1edcc 100644 --- a/development/development.env +++ b/development/development.env @@ -36,3 +36,5 @@ POSTGRES_DB=${NAUTOBOT_DB_NAME} MYSQL_USER=${NAUTOBOT_DB_USER} 
MYSQL_DATABASE=${NAUTOBOT_DB_NAME} MYSQL_ROOT_HOST=% + +NAUTOBOT_SSOT_HIDE_EXAMPLE_JOBS="True" diff --git a/development/nautobot_config.py b/development/nautobot_config.py index 315261a3..48e7f1a5 100644 --- a/development/nautobot_config.py +++ b/development/nautobot_config.py @@ -129,13 +129,17 @@ # # Enable installed Apps. Add the name of each App to the list. -PLUGINS = ["nautobot_device_onboarding"] +PLUGINS = [ + "nautobot_device_onboarding", + "nautobot_ssot", + ] # Apps configuration settings. These settings are used by various Apps that the user may have installed. # Each key in the dictionary is the name of an installed App and its value is a dictionary of settings. -# PLUGINS_CONFIG = { -# 'nautobot_device_onboarding': { -# 'foo': 'bar', -# 'buzz': 'bazz' -# } -# } +PLUGINS_CONFIG = { + 'nautobot_device_onboarding': { + }, +"nautobot_ssot": { + "hide_example_jobs": is_truthy(os.getenv("NAUTOBOT_SSOT_HIDE_EXAMPLE_JOBS")), + }, +} diff --git a/nautobot_device_onboarding/__init__.py b/nautobot_device_onboarding/__init__.py index c9150e85..3647ea33 100644 --- a/nautobot_device_onboarding/__init__.py +++ b/nautobot_device_onboarding/__init__.py @@ -17,7 +17,7 @@ class NautobotDeviceOnboardingConfig(NautobotAppConfig): description = "Nautobot App that simplifies device onboarding (and re-onboarding) by collecting and populating common device 'facts' into Nautobot." 
base_url = "nautobot-device-onboarding" required_settings = [] - min_version = "2.0.3" + min_version = "2.1.1" max_version = "2.9999" default_settings = { "create_platform_if_missing": True, diff --git a/nautobot_device_onboarding/diffsync/__init__.py b/nautobot_device_onboarding/diffsync/__init__.py new file mode 100644 index 00000000..71c0e44d --- /dev/null +++ b/nautobot_device_onboarding/diffsync/__init__.py @@ -0,0 +1 @@ +"""Diffsync.""" \ No newline at end of file diff --git a/nautobot_device_onboarding/diffsync/adapters/__init__.py b/nautobot_device_onboarding/diffsync/adapters/__init__.py new file mode 100644 index 00000000..8f0a3e84 --- /dev/null +++ b/nautobot_device_onboarding/diffsync/adapters/__init__.py @@ -0,0 +1 @@ +"""Adapter classes for loading DiffSyncModels with data from a Network or Nautobot.""" \ No newline at end of file diff --git a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py new file mode 100644 index 00000000..d07b61a0 --- /dev/null +++ b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py @@ -0,0 +1,12 @@ +"""DiffSync adapters.""" + +from nautobot_ssot.contrib import NautobotAdapter +from diffsync import DiffSync + + +class NetworkImporterNautobotAdapter(NautobotAdapter): + pass + + +class NetworkImporterNetworkAdapter(DiffSync): + pass \ No newline at end of file diff --git a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py new file mode 100644 index 00000000..13ab8a97 --- /dev/null +++ b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py @@ -0,0 +1,36 @@ +"""DiffSync adapters.""" + +from diffsync import DiffSync +from nautobot_ssot.contrib import NautobotAdapter +from nautobot_device_onboarding.diffsync.models import onboarding_models + + +class OnboardingNautobotAdapter(NautobotAdapter): + """Adapter for 
loading Nautobot data.""" + + device_type = onboarding_models.OnboardingDeviceType + device = onboarding_models.OnboardingDevice + + top_level = ["device_type", "device"] + +class OnboardingNetworkAdapter(DiffSync): + """Adapter for loading device data from a network.""" + + def __init__(self, *args, job, sync, site_filter=None, **kwargs): + """Initialize the NautobotDiffSync.""" + super().__init__(*args, **kwargs) + self.job = job + self.sync = sync + self.site_filter = site_filter + + def load_devices(self): + """Query devices and load data into a Diffsync model.""" + + for ip_address in self.job.ip_addresses: + #TODO: Call onboarding job to query devices + self.job.logger.info(f"Attempting to load data from {ip_address}") + + def load(self): + """Load device data.""" + self.load_devices() + diff --git a/nautobot_device_onboarding/diffsync/models/network_importer_models.py b/nautobot_device_onboarding/diffsync/models/network_importer_models.py new file mode 100644 index 00000000..4a06bcfb --- /dev/null +++ b/nautobot_device_onboarding/diffsync/models/network_importer_models.py @@ -0,0 +1 @@ +"""Diffsync models.""" \ No newline at end of file diff --git a/nautobot_device_onboarding/diffsync/models/onboarding_models.py b/nautobot_device_onboarding/diffsync/models/onboarding_models.py new file mode 100644 index 00000000..c0f19d3e --- /dev/null +++ b/nautobot_device_onboarding/diffsync/models/onboarding_models.py @@ -0,0 +1,20 @@ +"""Diffsync models.""" + +from nautobot_ssot.contrib import NautobotModel +from nautobot.dcim.models import Device, DeviceType + +class OnboardingDevice(NautobotModel): + + _modelname = "device" + _model = Device + _identifiers = ("name",) + + name: str + +class OnboardingDeviceType(NautobotModel): + + _modelname = "device_type" + _model = DeviceType + _identifiers = ("model",) + + model: str \ No newline at end of file diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py index 09253032..a8bc5a0e 100644 --- 
a/nautobot_device_onboarding/jobs.py +++ b/nautobot_device_onboarding/jobs.py @@ -1,19 +1,24 @@ """Device Onboarding Jobs.""" from django.conf import settings +from django.templatetags.static import static from nautobot.apps.jobs import Job, ObjectVar, IntegerVar, StringVar, BooleanVar from nautobot.core.celery import register_jobs from nautobot.dcim.models import Location, DeviceType, Platform -from nautobot.extras.models import Role, SecretsGroup, SecretsGroupAssociation +from nautobot.extras.models import Role, SecretsGroup, SecretsGroupAssociation, Status from nautobot.extras.choices import SecretsGroupAccessTypeChoices, SecretsGroupSecretTypeChoices from nautobot_device_onboarding.exceptions import OnboardException from nautobot_device_onboarding.helpers import onboarding_task_fqdn_to_ip from nautobot_device_onboarding.netdev_keeper import NetdevKeeper - +from nautobot_device_onboarding.diffsync.adapters.onboarding_adapters import OnboardingNautobotAdapter, OnboardingNetworkAdapter +from nautobot_device_onboarding.diffsync.adapters.network_importer_adapters import NetworkImporterNautobotAdapter, NetworkImporterNetworkAdapter +from nautobot_ssot.jobs.base import DataSource PLUGIN_SETTINGS = settings.PLUGINS_CONFIG["nautobot_device_onboarding"] +name = "Device Onboarding/Network Importer" + class OnboardingTask(Job): # pylint: disable=too-many-instance-attributes """Nautobot Job for onboarding a new device.""" @@ -180,4 +185,108 @@ def _parse_credentials(self, credentials): self.secret = settings.NAPALM_ARGS.get("secret", None) -register_jobs(OnboardingTask) +class SSOTDeviceOnboarding(DataSource): + """Job for syncing basic device info from a network into Nautobot.""" + + class Meta: + """Metadata about this Job.""" + + name = "Sync Devices" + description = "Synchronize basic device information into Nautobot" + + debug = BooleanVar(description="Enable for more verbose logging.") + + location = ObjectVar( + model=Location, + query_params={"content_type": 
"dcim.device"}, + description="Assigned Location for the onboarded device(s)", + ) + ip_addresses = StringVar( + description="IP Address/DNS Name of the device to onboard, specify in a comma separated list for multiple devices.", + label="IP Address/FQDN", + ) + port = IntegerVar(default=22) + timeout = IntegerVar(default=30) + credentials = ObjectVar( + model=SecretsGroup, required=False, description="SecretsGroup for Device connection credentials." + ) + platform = ObjectVar( + model=Platform, + required=False, + description="Device platform. Define ONLY to override auto-recognition of platform.", + ) + role = ObjectVar( + model=Role, + query_params={"content_types": "dcim.device"}, + required=False, + description="Device role. Define ONLY to override auto-recognition of role.", + ) + device_type = ObjectVar( + model=DeviceType, + label="Device Type", + required=False, + description="Device type. Define ONLY to override auto-recognition of type.", + ) + continue_on_failure = BooleanVar( + label="Continue On Failure", + default=True, + description="If an exception occurs, log the exception and continue to next device.", + ) + + def load_source_adapter(self): + """Load onboarding network adapter.""" + self.logger.info("Loading device data from network devices...") + self.source_adapter = OnboardingNetworkAdapter(job=self, sync=self.sync) + self.source_adapter.load() + + def load_target_adapter(self): + """Load onboarding nautobot adapter.""" + self.logger.info("Loading device data from Nautobot...") + self.target_adapter = OnboardingNautobotAdapter(job=self, sync=self.sync) + self.target_adapter.load() + + def run(self, + dryrun, + memory_profiling, + location, + ip_addresses, + port, + timeout, + credentials, + platform, + role, + device_type, + continue_on_failure, + *args, + **kwargs + ): # pylint:disable=arguments-differ + """Run sync.""" + self.dryrun = dryrun + self.memory_profiling = memory_profiling + self.location = location, + 
self.ip_addresses=ip_addresses, + self.port = port, + self.timeout = timeout, + self.credentials = credentials, + self.platform = platform, + self.role = role, + self.device_type = device_type, + self.continue_on_failure = continue_on_failure + super().run(dryrun, memory_profiling, *args, **kwargs) + +class SSOTNetworkImporter(DataSource): + """Job syncing extended device attributes into Nautobot.""" + + debug = BooleanVar(description="Enable for more verbose logging.") + + class Meta: + """Metadata about this Job.""" + + name = "Sync Network Data" + description = "Synchronize extended device attribute information into Nautobot; "\ + "including Interfaces, IPAddresses, Prefixes, Vlans and Cables." + + + +jobs = [OnboardingTask, SSOTDeviceOnboarding, SSOTNetworkImporter] +register_jobs(*jobs) diff --git a/pyproject.toml b/pyproject.toml index 18f1219c..e517a7f7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -29,7 +29,9 @@ packages = [ python = ">=3.8,<3.12" napalm = ">=2.5.0, <5" zipp = "^3.4.0" -nautobot = "^2.0.3" +nautobot = "^2.1.1" +nautobot-ssot = "^2.1.0" + [tool.poetry.group.dev.dependencies] bandit = "*" diff --git a/tasks.py b/tasks.py index 631063d5..f062a527 100644 --- a/tasks.py +++ b/tasks.py @@ -46,7 +46,7 @@ def is_truthy(arg): namespace.configure( { "nautobot_device_onboarding": { - "nautobot_ver": "2.0.3", + "nautobot_ver": "2.1.1", "project_name": "nautobot-device-onboarding", "python_ver": "3.11", "local": False, From bfa574063b990ad2206cc50ab1a5f778525c9620 Mon Sep 17 00:00:00 2001 From: David Cates Date: Wed, 17 Jan 2024 22:25:53 -0700 Subject: [PATCH 004/225] update ssot integration --- .../diffsync/adapters/onboarding_adapters.py | 80 ++++++++++++++++-- .../diffsync/models/onboarding_models.py | 29 ++++++- nautobot_device_onboarding/jobs.py | 82 ++++++++----------- 3 files changed, 135 insertions(+), 56 deletions(-) diff --git a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py 
b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py index 13ab8a97..70f9f004 100644 --- a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py @@ -3,34 +3,98 @@ from diffsync import DiffSync from nautobot_ssot.contrib import NautobotAdapter from nautobot_device_onboarding.diffsync.models import onboarding_models +import netaddr +####################################### +# FOR TESTING ONLY - TO BE REMOVED # +####################################### +mock_data = { + "10.1.1.8": { + "hostname": "demo-cisco-xe", + "serial_number": "9ABUXU580QS", + "device_type": "CSR1000V", + "mgmt_ip_address": "10.1.1.8", + } +} +####################################### +####################################### class OnboardingNautobotAdapter(NautobotAdapter): """Adapter for loading Nautobot data.""" device_type = onboarding_models.OnboardingDeviceType device = onboarding_models.OnboardingDevice + interface = onboarding_models.OnboardingInterface top_level = ["device_type", "device"] class OnboardingNetworkAdapter(DiffSync): """Adapter for loading device data from a network.""" - def __init__(self, *args, job, sync, site_filter=None, **kwargs): + device_type = onboarding_models.OnboardingDeviceType + device = onboarding_models.OnboardingDevice + interface = onboarding_models.OnboardingInterface + + top_level = ["device_type", "device"] + + def __init__( + self, + job: object, + sync: object, + *args, + **kwargs + ): """Initialize the NautobotDiffSync.""" super().__init__(*args, **kwargs) self.job = job self.sync = sync - self.site_filter = site_filter + + def _validate_ip_addresses(self, ip_addresses: list): + """Validate the format of each IP Address in a list of IP Addresses.""" + # Validate IP Addresses + validation_successful = True + for ip_address in ip_addresses: + try: + netaddr.IPAddress(ip_address) + except netaddr.AddrFormatError: + self.job.logger.error(f"[{ip_address}] 
is not a valid IP Address ") + validation_successful = False + if validation_successful: + return True + else: + raise netaddr.AddrConversionError def load_devices(self): - """Query devices and load data into a Diffsync model.""" - - for ip_address in self.job.ip_addresses: - #TODO: Call onboarding job to query devices - self.job.logger.info(f"Attempting to load data from {ip_address}") + """Query devices and load device data into a DiffSync model.""" + + # PROVIDE TO JOB: ip4address, port, timeout, secrets_group, platform (optional) + #TODO: Call onboarding job to query devices + + for ip_address in mock_data: + if self.job.debug: + self.job.logger.debug(f"loading device data for {ip_address}") + onboarding_device = self.device( + diffsync=self, + primary_ip4__host=ip_address, + location__name=self.job.location.name, + role__name=self.job.role.name, + device_type__model=mock_data[ip_address]["device_type"], + ) + self.add(onboarding_device) + + def load_device_types(self): + """Query devices and load device type data into a DiffSync model.""" + for ip_address in mock_data: + if self.job.debug: + self.job.logger.debug(f"loading device_type data for {ip_address}") + onboarding_device_type = self.device_type( + diffsync=self, + model = mock_data[ip_address]["device_type"] + ) + self.add(onboarding_device_type) def load(self): """Load device data.""" + self._validate_ip_addresses(self.job.ip_addresses) self.load_devices() - + self.load_device_types() diff --git a/nautobot_device_onboarding/diffsync/models/onboarding_models.py b/nautobot_device_onboarding/diffsync/models/onboarding_models.py index c0f19d3e..a66b6d1d 100644 --- a/nautobot_device_onboarding/diffsync/models/onboarding_models.py +++ b/nautobot_device_onboarding/diffsync/models/onboarding_models.py @@ -1,12 +1,37 @@ """Diffsync models.""" from nautobot_ssot.contrib import NautobotModel -from nautobot.dcim.models import Device, DeviceType +from nautobot.dcim.models import Device, DeviceType, Interface +from 
typing import List, Optional class OnboardingDevice(NautobotModel): _modelname = "device" _model = Device + _identifiers = ("primary_ip4__host",) + _attributes = ( + "location__name", + "device_type__model", + "role__name", + "platform__name", + ) + _children = { + "interface": "interfaces", + } + + primary_ip4__host: str + + location__name: Optional[str] + device_type__model: Optional[str] + role__name: Optional[str] + platform__name: Optional[str] + + interfaces: List["Interface"] = [] + +class OnboardingInterface(NautobotModel): + + _modelname = "interface" + _model = Interface _identifiers = ("name",) name: str @@ -16,5 +41,5 @@ class OnboardingDeviceType(NautobotModel): _modelname = "device_type" _model = DeviceType _identifiers = ("model",) - + model: str \ No newline at end of file diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py index a8bc5a0e..0f75ded1 100644 --- a/nautobot_device_onboarding/jobs.py +++ b/nautobot_device_onboarding/jobs.py @@ -4,6 +4,7 @@ from nautobot.apps.jobs import Job, ObjectVar, IntegerVar, StringVar, BooleanVar from nautobot.core.celery import register_jobs from nautobot.dcim.models import Location, DeviceType, Platform +from nautobot.ipam.models import Namespace from nautobot.extras.models import Role, SecretsGroup, SecretsGroupAssociation, Status from nautobot.extras.choices import SecretsGroupAccessTypeChoices, SecretsGroupSecretTypeChoices @@ -201,77 +202,66 @@ class Meta: query_params={"content_type": "dcim.device"}, description="Assigned Location for the onboarded device(s)", ) + namespace = ObjectVar( + model=Namespace, + description="Namespace IP Addresses belong to." 
+ ) ip_addresses = StringVar( - description="IP Address/DNS Name of the device to onboard, specify in a comma separated list for multiple devices.", - label="IP Address/FQDN", + description="IP Address of the device to onboard, specify in a comma separated list for multiple devices.", + label="IPv4 Addresses", + ) + role = ObjectVar( + model=Role, + query_params={"content_types": "dcim.device"}, + required=True, + description="Role to be applied to all onboarded devices", + ) + status = ObjectVar( + model=Status, + query_params={"content_types": "dcim.device"}, + required=True, + description="Status to be applied to all onboarded devices", ) port = IntegerVar(default=22) timeout = IntegerVar(default=30) credentials = ObjectVar( - model=SecretsGroup, required=False, description="SecretsGroup for Device connection credentials." + model=SecretsGroup, required=True, description="SecretsGroup for Device connection credentials." ) platform = ObjectVar( model=Platform, required=False, description="Device platform. Define ONLY to override auto-recognition of platform.", ) - role = ObjectVar( - model=Role, - query_params={"content_types": "dcim.device"}, - required=False, - description="Device role. Define ONLY to override auto-recognition of role.", - ) - device_type = ObjectVar( - model=DeviceType, - label="Device Type", - required=False, - description="Device type. 
Define ONLY to override auto-recognition of type.", - ) - continue_on_failure = BooleanVar( - label="Continue On Failure", + skip_device_type_update = BooleanVar( + label="Skip Device Type Update", default=True, - description="If an exception occurs, log the exception and continue to next device.", + description="If a device exists in Nautobot, do not update its associated device type.", ) def load_source_adapter(self): """Load onboarding network adapter.""" - self.logger.info("Loading device data from network devices...") self.source_adapter = OnboardingNetworkAdapter(job=self, sync=self.sync) self.source_adapter.load() def load_target_adapter(self): - """Load onboarding nautobot adapter.""" - self.logger.info("Loading device data from Nautobot...") + """Load onboarding Nautobot adapter.""" self.target_adapter = OnboardingNautobotAdapter(job=self, sync=self.sync) self.target_adapter.load() - def run(self, - dryrun, - memory_profiling, - location, - ip_addresses, - port, - timeout, - credentials, - platform, - role, - device_type, - continue_on_failure, - *args, - **kwargs - ): # pylint:disable=arguments-differ + def run(self, dryrun, memory_profiling, *args, **kwargs): # pylint:disable=arguments-differ """Run sync.""" + self.dryrun = dryrun self.memory_profiling = memory_profiling - self.location = location, - self.ip_addresses=ip_addresses, - self.port = port, - self.timeout = timeout, - self.credentials = credentials, - self.platform = platform, - self.role = role, - self.device_type = device_type, - self.continue_on_failure = continue_on_failure + self.location = kwargs["location"] + self.ip_addresses = kwargs["ip_addresses"].replace(" ", "").split(",") + self.role = kwargs["role"] + self.status = kwargs["status"] + self.port = kwargs["port"] + self.timeout = kwargs["timeout"] + self.credentials = kwargs["credentials"] + self.platform = kwargs["platform"] + self.skip_device_type_update = kwargs["skip_device_type_update"] super().run(dryrun, memory_profiling, 
*args, **kwargs) class SSOTNetworkImporter(DataSource): @@ -288,5 +278,5 @@ class Meta: -jobs = [OnboardingTask, SSOTDeviceOnboarding, SSOTNetworkImporter] +jobs = [OnboardingTask, SSOTDeviceOnboarding] register_jobs(*jobs) From 131fb2cdeac17b162edf66c021ac7b0a62f61e77 Mon Sep 17 00:00:00 2001 From: David Cates Date: Thu, 18 Jan 2024 22:08:33 -0700 Subject: [PATCH 005/225] update ssot integration --- .../diffsync/__init__.py | 2 +- .../diffsync/adapters/__init__.py | 2 +- .../adapters/network_importer_adapters.py | 3 +- .../diffsync/adapters/onboarding_adapters.py | 128 ++++++++++++++---- .../models/network_importer_models.py | 2 +- .../diffsync/models/onboarding_models.py | 104 +++++++++++--- nautobot_device_onboarding/jobs.py | 37 +++-- 7 files changed, 218 insertions(+), 60 deletions(-) diff --git a/nautobot_device_onboarding/diffsync/__init__.py b/nautobot_device_onboarding/diffsync/__init__.py index 71c0e44d..217de7de 100644 --- a/nautobot_device_onboarding/diffsync/__init__.py +++ b/nautobot_device_onboarding/diffsync/__init__.py @@ -1 +1 @@ -"""Diffsync.""" \ No newline at end of file +"""Diffsync.""" diff --git a/nautobot_device_onboarding/diffsync/adapters/__init__.py b/nautobot_device_onboarding/diffsync/adapters/__init__.py index 8f0a3e84..125a53f3 100644 --- a/nautobot_device_onboarding/diffsync/adapters/__init__.py +++ b/nautobot_device_onboarding/diffsync/adapters/__init__.py @@ -1 +1 @@ -"""Adapter classes for loading DiffSyncModels with data from a Network or Nautobot.""" \ No newline at end of file +"""Adapter classes for loading DiffSyncModels with data from a Network or Nautobot.""" diff --git a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py index d07b61a0..9c7421ab 100644 --- a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py @@ -1,6 +1,7 @@ 
"""DiffSync adapters.""" from nautobot_ssot.contrib import NautobotAdapter + from diffsync import DiffSync @@ -9,4 +10,4 @@ class NetworkImporterNautobotAdapter(NautobotAdapter): class NetworkImporterNetworkAdapter(DiffSync): - pass \ No newline at end of file + pass diff --git a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py index 70f9f004..b57746da 100644 --- a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py @@ -1,9 +1,12 @@ """DiffSync adapters.""" -from diffsync import DiffSync -from nautobot_ssot.contrib import NautobotAdapter +import netaddr +from nautobot.dcim.choices import InterfaceTypeChoices +from nautobot.dcim.models import Device from nautobot_device_onboarding.diffsync.models import onboarding_models -import netaddr +from nautobot_ssot.contrib import NautobotAdapter + +from diffsync import DiffSync ####################################### # FOR TESTING ONLY - TO BE REMOVED # @@ -12,44 +15,61 @@ "10.1.1.8": { "hostname": "demo-cisco-xe", "serial_number": "9ABUXU580QS", - "device_type": "CSR1000V", + "device_type": "CSR1000V2", "mgmt_ip_address": "10.1.1.8", + "mgmt_interface": "GigabitEthernet1", + "manufacturer": "Cisco", + "platform": "IOS", + "network_driver": "cisco_ios", + "prefix": "10.0.0.0/8" } } ####################################### ####################################### + class OnboardingNautobotAdapter(NautobotAdapter): """Adapter for loading Nautobot data.""" - device_type = onboarding_models.OnboardingDeviceType + manufacturer = onboarding_models.OnboardingManufacturer + platform = onboarding_models.OnboardingPlatform device = onboarding_models.OnboardingDevice + device_type = onboarding_models.OnboardingDeviceType interface = onboarding_models.OnboardingInterface + ip_address = onboarding_models.OnboardingIPAddress + + top_level = ["manufacturer", 
"platform", "device_type", "device"] + + def _load_objects(self, diffsync_model): + """Given a diffsync model class, load a list of models from the database and return them.""" + parameter_names = self._get_parameter_names(diffsync_model) + if diffsync_model._model == Device: + for database_object in diffsync_model._get_queryset(filter=self.job.ip_addresses): + self._load_single_object(database_object, diffsync_model, parameter_names) + else: + for database_object in diffsync_model._get_queryset(): + self._load_single_object(database_object, diffsync_model, parameter_names) - top_level = ["device_type", "device"] class OnboardingNetworkAdapter(DiffSync): """Adapter for loading device data from a network.""" - device_type = onboarding_models.OnboardingDeviceType + manufacturer = onboarding_models.OnboardingManufacturer + platform = onboarding_models.OnboardingPlatform device = onboarding_models.OnboardingDevice + device_type = onboarding_models.OnboardingDeviceType interface = onboarding_models.OnboardingInterface + ip_address = onboarding_models.OnboardingIPAddress - top_level = ["device_type", "device"] + top_level = ["manufacturer", "platform", "device_type", "device"] - def __init__( - self, - job: object, - sync: object, - *args, - **kwargs - ): + def __init__(self, job, sync, *args, **kwargs): """Initialize the NautobotDiffSync.""" super().__init__(*args, **kwargs) self.job = job self.sync = sync - def _validate_ip_addresses(self, ip_addresses: list): + def _validate_ip_addresses(self, ip_addresses): """Validate the format of each IP Address in a list of IP Addresses.""" # Validate IP Addresses validation_successful = True @@ -65,36 +85,96 @@ def _validate_ip_addresses(self, ip_addresses: list): raise netaddr.AddrConversionError def load_devices(self): - """Query devices and load device data into a DiffSync model.""" + """Load device data into a DiffSync model.""" # PROVIDE TO JOB: ip4address, port, timeout, secrets_group, platform (optional) - #TODO: Call 
onboarding job to query devices + # TODO: CHECK FOR FAILED CONNECTIONS AND DO NOT LOAD DATA, LOG FAILED IPs + # TODO: Call onboarding job to query devices for ip_address in mock_data: if self.job.debug: self.job.logger.debug(f"loading device data for {ip_address}") onboarding_device = self.device( diffsync=self, - primary_ip4__host=ip_address, + device_type__model=mock_data[ip_address]["device_type"], location__name=self.job.location.name, + name=mock_data[ip_address]["hostname"], + platform__name=mock_data[ip_address]["platform"], + primary_ip4__host=ip_address, role__name=self.job.role.name, - device_type__model=mock_data[ip_address]["device_type"], + status__name=self.job.device_status.name, + secrets_group__name=self.job.secrets_group.name, ) self.add(onboarding_device) + self.load_interface(onboarding_device, mock_data, ip_address) def load_device_types(self): - """Query devices and load device type data into a DiffSync model.""" + """Load device type data into a DiffSync model.""" for ip_address in mock_data: if self.job.debug: self.job.logger.debug(f"loading device_type data for {ip_address}") onboarding_device_type = self.device_type( diffsync=self, - model = mock_data[ip_address]["device_type"] + model=mock_data[ip_address]["device_type"], + manufacturer__name=mock_data[ip_address]["manufacturer"], ) self.add(onboarding_device_type) - + + def load_interface(self, onboarding_device, device_data, ip_address): + """Load interface data into a DiffSync model.""" + if self.job.debug: + self.job.logger.debug(f"loading interface data for {ip_address}") + onboarding_interface = self.interface( + diffsync=self, + name=device_data[ip_address]["mgmt_interface"], + device__name=device_data[ip_address]["hostname"], + status__name=self.job.interface_status.name, + type=InterfaceTypeChoices.TYPE_OTHER, + ) + self.add(onboarding_interface) + onboarding_device.add_child(onboarding_interface) + self.load_ip_address(onboarding_interface, mock_data, ip_address) + + def 
load_ip_address(self, onboarding_interface, device_data, ip_address): + """Load ip address data into a DiffSync model.""" + if self.job.debug: + self.job.logger.debug(f"loading ip address data for {ip_address}") + onboarding_ip_address = self.ip_address( + diffsync=self, + parent__network=device_data[ip_address]["prefix"], + host=ip_address, + ) + self.add(onboarding_ip_address) + onboarding_interface.add_child(onboarding_ip_address) + + def load_manufacturers(self): + """Load manufacturer data into a DiffSync model.""" + for ip_address in mock_data: + if self.job.debug: + self.job.logger.debug(f"loading manufacturer data for {ip_address}") + onboarding_manufacturer = self.manufacturer( + diffsync=self, + name=mock_data[ip_address]["manufacturer"], + ) + self.add(onboarding_manufacturer) + + def load_platforms(self): + """Load platform data into a DiffSync model.""" + for ip_address in mock_data: + if self.job.debug: + self.job.logger.debug(f"loading platform data for {ip_address}") + onboarding_platform = self.platform( + diffsync=self, + name=mock_data[ip_address]["platform"], + manufacturer__name=mock_data[ip_address]["manufacturer"], + network_driver=mock_data[ip_address]["network_driver"], + ) + self.add(onboarding_platform) + def load(self): """Load device data.""" self._validate_ip_addresses(self.job.ip_addresses) - self.load_devices() + self.load_manufacturers() + self.load_platforms() self.load_device_types() + self.load_devices() diff --git a/nautobot_device_onboarding/diffsync/models/network_importer_models.py b/nautobot_device_onboarding/diffsync/models/network_importer_models.py index 4a06bcfb..72f34562 100644 --- a/nautobot_device_onboarding/diffsync/models/network_importer_models.py +++ b/nautobot_device_onboarding/diffsync/models/network_importer_models.py @@ -1 +1 @@ -"""Diffsync models.""" \ No newline at end of file +"""Diffsync models.""" diff --git a/nautobot_device_onboarding/diffsync/models/onboarding_models.py 
b/nautobot_device_onboarding/diffsync/models/onboarding_models.py index a66b6d1d..c945a2ca 100644 --- a/nautobot_device_onboarding/diffsync/models/onboarding_models.py +++ b/nautobot_device_onboarding/diffsync/models/onboarding_models.py @@ -1,19 +1,26 @@ """Diffsync models.""" -from nautobot_ssot.contrib import NautobotModel -from nautobot.dcim.models import Device, DeviceType, Interface from typing import List, Optional -class OnboardingDevice(NautobotModel): +import netaddr +from nautobot.dcim.models import (Device, DeviceType, Interface, Manufacturer, + Platform) +from nautobot.ipam.models import IPAddress, Prefix +from nautobot_ssot.contrib import NautobotModel + +class OnboardingDevice(NautobotModel): _modelname = "device" _model = Device _identifiers = ("primary_ip4__host",) _attributes = ( - "location__name", - "device_type__model", - "role__name", - "platform__name", + "device_type__model", + "location__name", + "name", + "platform__name", + "role__name", + "secrets_group__name", + "status__name", ) _children = { "interface": "interfaces", @@ -21,25 +28,88 @@ class OnboardingDevice(NautobotModel): primary_ip4__host: str - location__name: Optional[str] device_type__model: Optional[str] - role__name: Optional[str] + location__name: Optional[str] + name: Optional[str] platform__name: Optional[str] + role__name: Optional[str] + secrets_group__name: Optional[str] + status__name: Optional[str] - interfaces: List["Interface"] = [] + interfaces: List["OnboardingInterface"] = [] + device_type: List["OnboardingDeviceType"] = [] -class OnboardingInterface(NautobotModel): + @classmethod + def _get_queryset(cls, filter: list = None): + """Get the queryset used to load the models data from Nautobot.""" + parameter_names = list(cls._identifiers) + list(cls._attributes) + # Here we identify any foreign keys (i.e. fields with '__' in them) so that we can load them directly in the + # first query if this function hasn't been overridden. 
+ prefetch_related_parameters = [parameter.split("__")[0] for parameter in parameter_names if "__" in parameter] + qs = cls.get_queryset(filter=filter) + return qs.prefetch_related(*prefetch_related_parameters) + + @classmethod + def get_queryset(cls, filter: list = None): + """Get the queryset used to load the models data from Nautobot.""" + if filter: + # Only devices with a primary_ip that is being onboarded should be considered for the sync + return cls._model.objects.filter(primary_ip4__host__in=filter) + else: + return cls._model.objects.all() +class OnboardingDeviceType(NautobotModel): + _modelname = "device_type" + _model = DeviceType + _identifiers = ("model", "manufacturer__name") + + model: str + manufacturer__name: str + + +class OnboardingInterface(NautobotModel): _modelname = "interface" _model = Interface + _identifiers = ("name", "device__name") + _attributes = ( + "status__name", + "type", + ) + _children = {"ip_address": "ip_addresses"} + + name: str + device__name: str + + status__name: Optional[str] + type: Optional[str] + + ip_addresses: List["OnboardingIPAddress"] = [] + + +class OnboardingIPAddress(NautobotModel): + _modelname = "ip_address" + _model = IPAddress + _identifiers = ("parent__network", "host") + + parent__network: str + host: str + + +class OnboardingManufacturer(NautobotModel): + _modelname = "manufacturer" + _model = Manufacturer _identifiers = ("name",) name: str -class OnboardingDeviceType(NautobotModel): - _modelname = "device_type" - _model = DeviceType - _identifiers = ("model",) - - model: str \ No newline at end of file +class OnboardingPlatform(NautobotModel): + _modelname = "platform" + _model = Platform + _identifiers = ("name",) + _attributes = ("network_driver", "manufacturer__name") + + name: str + + network_driver: Optional[str] + manufacturer__name: Optional[str] diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py index 0f75ded1..a4286c92 100644 --- 
a/nautobot_device_onboarding/jobs.py +++ b/nautobot_device_onboarding/jobs.py @@ -14,6 +14,7 @@ from nautobot_device_onboarding.diffsync.adapters.onboarding_adapters import OnboardingNautobotAdapter, OnboardingNetworkAdapter from nautobot_device_onboarding.diffsync.adapters.network_importer_adapters import NetworkImporterNautobotAdapter, NetworkImporterNetworkAdapter from nautobot_ssot.jobs.base import DataSource +from diffsync.enum import DiffSyncFlags PLUGIN_SETTINGS = settings.PLUGINS_CONFIG["nautobot_device_onboarding"] @@ -189,13 +190,18 @@ def _parse_credentials(self, credentials): class SSOTDeviceOnboarding(DataSource): """Job for syncing basic device info from a network into Nautobot.""" + def __init__(self): + """Initialize SSOTDeviceOnboarding.""" + super().__init__() + self.diffsync_flags = DiffSyncFlags.SKIP_UNMATCHED_DST + class Meta: """Metadata about this Job.""" name = "Sync Devices" description = "Synchronize basic device information into Nautobot" - debug = BooleanVar(description="Enable for more verbose logging.") + debug = BooleanVar(description="Enable for more verbose logging.", default=False) location = ObjectVar( model=Location, @@ -204,7 +210,7 @@ class Meta: ) namespace = ObjectVar( model=Namespace, - description="Namespace IP Addresses belong to." + description="Namespace ip addresses belong to." 
) ip_addresses = StringVar( description="IP Address of the device to onboard, specify in a comma separated list for multiple devices.", @@ -216,27 +222,28 @@ class Meta: required=True, description="Role to be applied to all onboarded devices", ) - status = ObjectVar( + device_status = ObjectVar( model=Status, query_params={"content_types": "dcim.device"}, required=True, description="Status to be applied to all onboarded devices", ) + interface_status = ObjectVar( + model=Status, + query_params={"content_types": "dcim.interface"}, + required=True, + description="Status to be applied to all onboarded device interfaces", + ) port = IntegerVar(default=22) timeout = IntegerVar(default=30) - credentials = ObjectVar( - model=SecretsGroup, required=True, description="SecretsGroup for Device connection credentials." + secrets_group = ObjectVar( + model=SecretsGroup, required=True, description="SecretsGroup for device connection credentials." ) platform = ObjectVar( model=Platform, required=False, description="Device platform. 
Define ONLY to override auto-recognition of platform.", ) - skip_device_type_update = BooleanVar( - label="Skip Device Type Update", - default=True, - description="If a device exists in Nautobot, do not update its associated device type.", - ) def load_source_adapter(self): """Load onboarding network adapter.""" @@ -253,15 +260,16 @@ def run(self, dryrun, memory_profiling, *args, **kwargs): # pylint:disable=argu self.dryrun = dryrun self.memory_profiling = memory_profiling + self.debug = kwargs["debug"] self.location = kwargs["location"] self.ip_addresses = kwargs["ip_addresses"].replace(" ", "").split(",") self.role = kwargs["role"] - self.status = kwargs["status"] + self.device_status = kwargs["device_status"] + self.interface_status = kwargs["interface_status"] self.port = kwargs["port"] self.timeout = kwargs["timeout"] - self.credentials = kwargs["credentials"] + self.secrets_group = kwargs["secrets_group"] self.platform = kwargs["platform"] - self.skip_device_type_update = kwargs["skip_device_type_update"] super().run(dryrun, memory_profiling, *args, **kwargs) class SSOTNetworkImporter(DataSource): @@ -275,8 +283,7 @@ class Meta: name = "Sync Network Data" description = "Synchronize extended device attribute information into Nautobot; "\ "including Interfaces, IPAddresses, Prefixes, Vlans and Cables." 
- - + jobs = [OnboardingTask, SSOTDeviceOnboarding] register_jobs(*jobs) From c687f2a386df2feba5959dbc6ca5ce65bdd813fb Mon Sep 17 00:00:00 2001 From: David Cates Date: Fri, 19 Jan 2024 15:54:38 -0700 Subject: [PATCH 006/225] add command getter job --- development/nautobot_config.py | 14 +- .../diffsync/adapters/onboarding_adapters.py | 17 +- .../diffsync/models/onboarding_models.py | 16 +- nautobot_device_onboarding/jobs.py | 198 +++++++++++++++++- pyproject.toml | 3 +- 5 files changed, 237 insertions(+), 11 deletions(-) diff --git a/development/nautobot_config.py b/development/nautobot_config.py index 48e7f1a5..4b51769f 100644 --- a/development/nautobot_config.py +++ b/development/nautobot_config.py @@ -132,7 +132,8 @@ PLUGINS = [ "nautobot_device_onboarding", "nautobot_ssot", - ] + "nautobot_plugin_nornir", +] # Apps configuration settings. These settings are used by various Apps that the user may have installed. # Each key in the dictionary is the name of an installed App and its value is a dictionary of settings. 
@@ -142,4 +143,15 @@ "nautobot_ssot": { "hide_example_jobs": is_truthy(os.getenv("NAUTOBOT_SSOT_HIDE_EXAMPLE_JOBS")), }, + "nautobot_plugin_nornir": { + "nornir_settings": { + "credentials": "nautobot_plugin_nornir.plugins.credentials.env_vars.CredentialsEnvVars", + "runner": { + "plugin": "threaded", + "options": { + "num_workers": 20, + }, + }, + }, + }, } diff --git a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py index b57746da..d0bae98a 100644 --- a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py @@ -8,6 +8,8 @@ from diffsync import DiffSync +from nautobot.extras.models.jobs import Job as JobModel + ####################################### # FOR TESTING ONLY - TO BE REMOVED # ####################################### @@ -21,7 +23,9 @@ "manufacturer": "Cisco", "platform": "IOS", "network_driver": "cisco_ios", - "prefix": "10.0.0.0/8" + "prefix": "10.0.0.0", # this is the network field on the Prefix model + "prefix_length": 8, + "mask_length": 24, } } ####################################### @@ -91,6 +95,11 @@ def load_devices(self): # TODO: CHECK FOR FAILED CONNECTIONS AND DO NOT LOAD DATA, LOG FAILED IPs # TODO: Call onboarding job to query devices + command_getter_job = JobModel.objects.get(name="Command Getter for Device Onboarding").job_task + result = command_getter_job.s() + result.apply_async(args=self.job.job_result.task_args, kwargs=self.job.job_result.task_kwargs, **self.job.job_result.celery_kwargs) + + for ip_address in mock_data: if self.job.debug: self.job.logger.debug(f"loading device data for {ip_address}") @@ -101,7 +110,7 @@ def load_devices(self): name=mock_data[ip_address]["hostname"], platform__name=mock_data[ip_address]["platform"], primary_ip4__host=ip_address, - role__name=self.job.role.name, + role__name=self.job.device_role.name, 
status__name=self.job.device_status.name, secrets_group__name=self.job.secrets_group.name, ) @@ -130,6 +139,7 @@ def load_interface(self, onboarding_device, device_data, ip_address): device__name=device_data[ip_address]["hostname"], status__name=self.job.interface_status.name, type=InterfaceTypeChoices.TYPE_OTHER, + mgmt_only=self.job.management_only_interface, ) self.add(onboarding_interface) onboarding_device.add_child(onboarding_interface) @@ -141,8 +151,11 @@ def load_ip_address(self, onboarding_interface, device_data, ip_address): self.job.logger.debug(f"loading ip address data for {ip_address}") onboarding_ip_address = self.ip_address( diffsync=self, + parent__namespace__name=self.job.namespace.name, parent__network=device_data[ip_address]["prefix"], + parent__prefix_length=device_data[ip_address]["prefix_length"], host=ip_address, + mask_length=device_data[ip_address]["mask_length"], ) self.add(onboarding_ip_address) onboarding_interface.add_child(onboarding_ip_address) diff --git a/nautobot_device_onboarding/diffsync/models/onboarding_models.py b/nautobot_device_onboarding/diffsync/models/onboarding_models.py index c945a2ca..162d36bc 100644 --- a/nautobot_device_onboarding/diffsync/models/onboarding_models.py +++ b/nautobot_device_onboarding/diffsync/models/onboarding_models.py @@ -58,6 +58,7 @@ def get_queryset(cls, filter: list = None): else: return cls._model.objects.all() + class OnboardingDeviceType(NautobotModel): _modelname = "device_type" _model = DeviceType @@ -72,6 +73,7 @@ class OnboardingInterface(NautobotModel): _model = Interface _identifiers = ("name", "device__name") _attributes = ( + "mgmt_only", "status__name", "type", ) @@ -80,6 +82,7 @@ class OnboardingInterface(NautobotModel): name: str device__name: str + mgmt_only: Optional[bool] status__name: Optional[str] type: Optional[str] @@ -89,10 +92,19 @@ class OnboardingInterface(NautobotModel): class OnboardingIPAddress(NautobotModel): _modelname = "ip_address" _model = IPAddress - 
_identifiers = ("parent__network", "host") - + _identifiers = ( + "parent__namespace__name", + "parent__network", + "parent__prefix_length", + "host", + "mask_length", + ) + + parent__namespace__name: str parent__network: str + parent__prefix_length: int host: str + mask_length: int class OnboardingManufacturer(NautobotModel): diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py index a4286c92..a5df60af 100644 --- a/nautobot_device_onboarding/jobs.py +++ b/nautobot_device_onboarding/jobs.py @@ -16,6 +16,31 @@ from nautobot_ssot.jobs.base import DataSource from diffsync.enum import DiffSyncFlags +from django.conf import settings +from nautobot.apps.jobs import Job, ObjectVar, IntegerVar, StringVar, BooleanVar +from nautobot.core.celery import register_jobs +from nautobot.dcim.models import Location, DeviceType, Platform +from nautobot.extras.models import Role, SecretsGroup, SecretsGroupAssociation +from nautobot.extras.choices import SecretsGroupAccessTypeChoices, SecretsGroupSecretTypeChoices +from netmiko import SSHDetect +from nornir import InitNornir + +from nornir_netmiko.tasks import netmiko_send_command +from nornir.core.plugins.inventory import InventoryPluginRegister +from nornir.core.task import Result, Task +from nornir.core.inventory import ( + Inventory, + ConnectionOptions, + Defaults, + Groups, + Host, + Hosts, +) + +from nautobot_device_onboarding.exceptions import OnboardException +from nautobot_device_onboarding.helpers import onboarding_task_fqdn_to_ip +from nautobot_device_onboarding.netdev_keeper import NetdevKeeper + PLUGIN_SETTINGS = settings.PLUGINS_CONFIG["nautobot_device_onboarding"] @@ -201,7 +226,7 @@ class Meta: name = "Sync Devices" description = "Synchronize basic device information into Nautobot" - debug = BooleanVar(description="Enable for more verbose logging.", default=False) + debug = BooleanVar( default=False, description="Enable for more verbose logging.",) location = ObjectVar( model=Location, 
@@ -216,7 +241,12 @@ class Meta: description="IP Address of the device to onboard, specify in a comma separated list for multiple devices.", label="IPv4 Addresses", ) - role = ObjectVar( + management_only_interface = BooleanVar( + default=False, + label="Set Management Only", + description="If True, interfaces that are created or updated will be set to management only. If False, the interface will be set to not be management only.", + ) + device_role = ObjectVar( model=Role, query_params={"content_types": "dcim.device"}, required=True, @@ -244,7 +274,7 @@ class Meta: required=False, description="Device platform. Define ONLY to override auto-recognition of platform.", ) - + def load_source_adapter(self): """Load onboarding network adapter.""" self.source_adapter = OnboardingNetworkAdapter(job=self, sync=self.sync) @@ -262,8 +292,10 @@ def run(self, dryrun, memory_profiling, *args, **kwargs): # pylint:disable=argu self.memory_profiling = memory_profiling self.debug = kwargs["debug"] self.location = kwargs["location"] + self.namespace = kwargs["namespace"] self.ip_addresses = kwargs["ip_addresses"].replace(" ", "").split(",") - self.role = kwargs["role"] + self.management_only_interface = kwargs["management_only_interface"] + self.device_role = kwargs["device_role"] self.device_status = kwargs["device_status"] self.interface_status = kwargs["interface_status"] self.port = kwargs["port"] @@ -285,5 +317,161 @@ class Meta: "including Interfaces, IPAddresses, Prefixes, Vlans and Cables." 
-jobs = [OnboardingTask, SSOTDeviceOnboarding] +PLATFORM_COMMAND_MAP = { + "cisco_ios": ["show version", "show inventory", "show interfaces"], + "cisco_nxos": ["show version", "show inventory", "show interface"], + } + +def netmiko_send_commands(task: Task): + platform = task.host.platform or 'default' + for command in PLATFORM_COMMAND_MAP.get(platform): + task.run(task=netmiko_send_command, command_string=command, use_textfsm=True) + +class CommandGetterDO(Job): + class Meta: # pylint: disable=too-few-public-methods + """Meta object boilerplate for onboarding.""" + + name = "Command Getter for Device Onboarding" + description = "Login to a device(s) and run commands." + has_sensitive_variables = False + hidden = False + + class EmptyInventory: + """Creates an empty Nornir Inventory to be populated later.""" + def __init__(self, *args, **kwargs): + pass + + def load(self) -> Inventory: + """Create a default empty inventory.""" + hosts = Hosts() + defaults = Defaults(data={}) + groups = Groups() + return Inventory(hosts=hosts, groups=groups, defaults=defaults) + + InventoryPluginRegister.register("empty-inventory", EmptyInventory) + + def __init__(self, *args, **kwargs): + self.username = None + self.password = None + self.secret = None + self.secrets_group = None + self.ip4address = None + self.platform = None + self.port = None + self.timeout = None + super().__init__(*args, **kwargs) + + def _parse_credentials(self, credentials): + """Parse and return dictionary of credentials.""" + if credentials: + self.logger.info("Attempting to parse credentials from selected SecretGroup") + try: + self.username = credentials.get_secret_value( + access_type=SecretsGroupAccessTypeChoices.TYPE_GENERIC, + secret_type=SecretsGroupSecretTypeChoices.TYPE_USERNAME, + ) + self.password = credentials.get_secret_value( + access_type=SecretsGroupAccessTypeChoices.TYPE_GENERIC, + secret_type=SecretsGroupSecretTypeChoices.TYPE_PASSWORD, + ) + try: + self.secret = 
credentials.get_secret_value( + access_type=SecretsGroupAccessTypeChoices.TYPE_GENERIC, + secret_type=SecretsGroupSecretTypeChoices.TYPE_SECRET, + ) + except Exception as e: + self.secret = None + except Exception as err: + self.logger.exception(f"Unable to use SecretsGroup selected, ensure Access Type is set to Generic & at minimum Username & Password types are set.", {e} + ) + raise OnboardException("fail-credentials - Unable to parse selected credentials.") from err + + else: + self.logger.info("Using napalm credentials configured in nautobot_config.py") + self.username = settings.NAPALM_USERNAME + self.password = settings.NAPALM_PASSWORD + self.secret = settings.NAPALM_ARGS.get("secret", None) + + def guess_netmiko_device_type(self, hostname, username, password): + """Guess the device type of host, based on Netmiko.""" + guessed_device_type = None + + netmiko_optional_args = {} + + remote_device = { + "device_type": "autodetect", + "host": hostname, + "username": username, + "password": password, + **netmiko_optional_args, + } + + try: + guesser = SSHDetect(**remote_device) + guessed_device_type = guesser.autodetect() + + except Exception as err: + print(err) + return guessed_device_type + + def run(self): + mock_job_data = {"ip4address": "174.51.52.76,10.1.1.1", "platform": "cisco_ios", "secrets_group": SecretsGroup.objects.get(name="Cisco Devices"), "port": 8922,"timeout": 30} + + """Process onboarding task from ssot-ni job.""" + self.ip4address = mock_job_data["ip4address"] + self.secrets_group = mock_job_data["secrets_group"] + self.platform = mock_job_data["platform"] + self.port = mock_job_data["port"] + self.timeout = mock_job_data["timeout"] + + # Initiate Nornir instance with empty inventory + try: + with InitNornir(inventory={"plugin": "empty-inventory"}) as nr: + + # Parse credentials from SecretsGroup + self._parse_credentials(mock_job_data["secrets_group"]) + + # Build Nornir Inventory + ip_address = mock_job_data["ip4address"].split(",") + 
self.platform = mock_job_data.get("platform", None) + for h in ip_address: + if not self.platform: + self.platform = self.guess_netmiko_device_type(h, self.username, self.password) + + host = Host( + name=h, + hostname=h, + port=mock_job_data["port"], + username=self.username, + password=self.password, + platform=self.platform, + connection_options={ + "netmiko": ConnectionOptions( + hostname=h, + port=mock_job_data["port"], + username=self.username, + password=self.password, + platform=self.platform, + ) + }, + ) + nr.inventory.hosts.update({h: host}) + self.logger.info(nr.inventory.hosts) + + self.logger.info(f"Inventory built for {len(ip_address)} devices") + + results = nr.run(task=netmiko_send_commands) + + for agg_result in results: + for r in results[agg_result]: + self.logger.info(f"host: {r.host}") + self.logger.info(f"result: {r.result}") + + except Exception as err: + self.logger.info(f"Error: {err}") + return err + return {"addtional_data": "working"} + + +jobs = [OnboardingTask, SSOTDeviceOnboarding, CommandGetterDO] register_jobs(*jobs) diff --git a/pyproject.toml b/pyproject.toml index e517a7f7..62e0bb83 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -30,7 +30,8 @@ python = ">=3.8,<3.12" napalm = ">=2.5.0, <5" zipp = "^3.4.0" nautobot = "^2.1.1" -nautobot-ssot = "^2.1.0" +nautobot-ssot = "^2.2.0" +nautobot-plugin-nornir = "2.0.0" [tool.poetry.group.dev.dependencies] From 6f9b7771c1890ce9f829896eb4b204be4d0bb9ed Mon Sep 17 00:00:00 2001 From: David Cates Date: Fri, 19 Jan 2024 15:59:23 -0700 Subject: [PATCH 007/225] update lock --- poetry.lock | 1841 +++++++++++++++++++++++++++++---------------------- 1 file changed, 1064 insertions(+), 777 deletions(-) diff --git a/poetry.lock b/poetry.lock index f91e0c4e..1e961cd9 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,10 +1,9 @@ -# This file is automatically @generated by Poetry 1.4.0 and should not be changed by hand. 
+# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. [[package]] name = "amqp" version = "5.2.0" description = "Low-level AMQP client for Python (fork of amqplib)." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -19,7 +18,6 @@ vine = ">=5.0.0,<6.0.0" name = "aniso8601" version = "7.0.0" description = "A library for parsing ISO 8601 strings." -category = "main" optional = false python-versions = "*" files = [ @@ -27,11 +25,32 @@ files = [ {file = "aniso8601-7.0.0.tar.gz", hash = "sha256:513d2b6637b7853806ae79ffaca6f3e8754bdd547048f5ccc1420aec4b714f1e"}, ] +[[package]] +name = "anyio" +version = "4.2.0" +description = "High level compatibility layer for multiple asynchronous event loop implementations" +optional = false +python-versions = ">=3.8" +files = [ + {file = "anyio-4.2.0-py3-none-any.whl", hash = "sha256:745843b39e829e108e518c489b31dc757de7d2131d53fac32bd8df268227bfee"}, + {file = "anyio-4.2.0.tar.gz", hash = "sha256:e1875bb4b4e2de1669f4bc7869b6d3f54231cdced71605e6e64c9be77e3be50f"}, +] + +[package.dependencies] +exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} +idna = ">=2.8" +sniffio = ">=1.1" +typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""} + +[package.extras] +doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] +trio = ["trio (>=0.23)"] + [[package]] name = "asgiref" version = "3.7.2" description = "ASGI specs, helper code, and adapters" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -49,7 +68,6 @@ tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"] name = "astroid" version = "2.15.8" description = "An abstract syntax tree for Python with inference support." 
-category = "dev" optional = false python-versions = ">=3.7.2" files = [ @@ -69,7 +87,6 @@ wrapt = [ name = "async-timeout" version = "4.0.3" description = "Timeout context manager for asyncio programs" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -79,28 +96,27 @@ files = [ [[package]] name = "attrs" -version = "23.1.0" +version = "23.2.0" description = "Classes Without Boilerplate" -category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "attrs-23.1.0-py3-none-any.whl", hash = "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04"}, - {file = "attrs-23.1.0.tar.gz", hash = "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015"}, + {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, + {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, ] [package.extras] cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] -dev = ["attrs[docs,tests]", "pre-commit"] +dev = ["attrs[tests]", "pre-commit"] docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] tests = ["attrs[tests-no-zope]", "zope-interface"] -tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] +tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] [[package]] name = "backports-zoneinfo" version = "0.2.1" description = "Backport of the standard library zoneinfo module" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -130,19 +146,18 @@ tzdata = ["tzdata"] [[package]] name = "bandit" -version = "1.7.5" +version = "1.7.6" description = "Security oriented static analyser for python code." 
-category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "bandit-1.7.5-py3-none-any.whl", hash = "sha256:75665181dc1e0096369112541a056c59d1c5f66f9bb74a8d686c3c362b83f549"}, - {file = "bandit-1.7.5.tar.gz", hash = "sha256:bdfc739baa03b880c2d15d0431b31c658ffc348e907fe197e54e0389dd59e11e"}, + {file = "bandit-1.7.6-py3-none-any.whl", hash = "sha256:36da17c67fc87579a5d20c323c8d0b1643a890a2b93f00b3d1229966624694ff"}, + {file = "bandit-1.7.6.tar.gz", hash = "sha256:72ce7bc9741374d96fb2f1c9a8960829885f1243ffde743de70a19cee353e8f3"}, ] [package.dependencies] colorama = {version = ">=0.3.9", markers = "platform_system == \"Windows\""} -GitPython = ">=1.0.1" +GitPython = ">=3.1.30" PyYAML = ">=5.3.1" rich = "*" stevedore = ">=1.20.0" @@ -154,33 +169,38 @@ yaml = ["PyYAML"] [[package]] name = "bcrypt" -version = "4.0.1" +version = "4.1.2" description = "Modern password hashing for your software and your servers" -category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "bcrypt-4.0.1-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:b1023030aec778185a6c16cf70f359cbb6e0c289fd564a7cfa29e727a1c38f8f"}, - {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:08d2947c490093a11416df18043c27abe3921558d2c03e2076ccb28a116cb6d0"}, - {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0eaa47d4661c326bfc9d08d16debbc4edf78778e6aaba29c1bc7ce67214d4410"}, - {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ae88eca3024bb34bb3430f964beab71226e761f51b912de5133470b649d82344"}, - {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:a522427293d77e1c29e303fc282e2d71864579527a04ddcfda6d4f8396c6c36a"}, - {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_28_aarch64.whl", hash = 
"sha256:fbdaec13c5105f0c4e5c52614d04f0bca5f5af007910daa8b6b12095edaa67b3"}, - {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:ca3204d00d3cb2dfed07f2d74a25f12fc12f73e606fcaa6975d1f7ae69cacbb2"}, - {file = "bcrypt-4.0.1-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:089098effa1bc35dc055366740a067a2fc76987e8ec75349eb9484061c54f535"}, - {file = "bcrypt-4.0.1-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:e9a51bbfe7e9802b5f3508687758b564069ba937748ad7b9e890086290d2f79e"}, - {file = "bcrypt-4.0.1-cp36-abi3-win32.whl", hash = "sha256:2caffdae059e06ac23fce178d31b4a702f2a3264c20bfb5ff541b338194d8fab"}, - {file = "bcrypt-4.0.1-cp36-abi3-win_amd64.whl", hash = "sha256:8a68f4341daf7522fe8d73874de8906f3a339048ba406be6ddc1b3ccb16fc0d9"}, - {file = "bcrypt-4.0.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf4fa8b2ca74381bb5442c089350f09a3f17797829d958fad058d6e44d9eb83c"}, - {file = "bcrypt-4.0.1-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:67a97e1c405b24f19d08890e7ae0c4f7ce1e56a712a016746c8b2d7732d65d4b"}, - {file = "bcrypt-4.0.1-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b3b85202d95dd568efcb35b53936c5e3b3600c7cdcc6115ba461df3a8e89f38d"}, - {file = "bcrypt-4.0.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cbb03eec97496166b704ed663a53680ab57c5084b2fc98ef23291987b525cb7d"}, - {file = "bcrypt-4.0.1-pp38-pypy38_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:5ad4d32a28b80c5fa6671ccfb43676e8c1cc232887759d1cd7b6f56ea4355215"}, - {file = "bcrypt-4.0.1-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b57adba8a1444faf784394de3436233728a1ecaeb6e07e8c22c8848f179b893c"}, - {file = "bcrypt-4.0.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:705b2cea8a9ed3d55b4491887ceadb0106acf7c6387699fca771af56b1cdeeda"}, - {file = "bcrypt-4.0.1-pp39-pypy39_pp73-manylinux_2_24_x86_64.whl", hash = 
"sha256:2b3ac11cf45161628f1f3733263e63194f22664bf4d0c0f3ab34099c02134665"}, - {file = "bcrypt-4.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:3100851841186c25f127731b9fa11909ab7b1df6fc4b9f8353f4f1fd952fbf71"}, - {file = "bcrypt-4.0.1.tar.gz", hash = "sha256:27d375903ac8261cfe4047f6709d16f7d18d39b1ec92aaf72af989552a650ebd"}, + {file = "bcrypt-4.1.2-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:ac621c093edb28200728a9cca214d7e838529e557027ef0581685909acd28b5e"}, + {file = "bcrypt-4.1.2-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea505c97a5c465ab8c3ba75c0805a102ce526695cd6818c6de3b1a38f6f60da1"}, + {file = "bcrypt-4.1.2-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:57fa9442758da926ed33a91644649d3e340a71e2d0a5a8de064fb621fd5a3326"}, + {file = "bcrypt-4.1.2-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:eb3bd3321517916696233b5e0c67fd7d6281f0ef48e66812db35fc963a422a1c"}, + {file = "bcrypt-4.1.2-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:6cad43d8c63f34b26aef462b6f5e44fdcf9860b723d2453b5d391258c4c8e966"}, + {file = "bcrypt-4.1.2-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:44290ccc827d3a24604f2c8bcd00d0da349e336e6503656cb8192133e27335e2"}, + {file = "bcrypt-4.1.2-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:732b3920a08eacf12f93e6b04ea276c489f1c8fb49344f564cca2adb663b3e4c"}, + {file = "bcrypt-4.1.2-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:1c28973decf4e0e69cee78c68e30a523be441972c826703bb93099868a8ff5b5"}, + {file = "bcrypt-4.1.2-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b8df79979c5bae07f1db22dcc49cc5bccf08a0380ca5c6f391cbb5790355c0b0"}, + {file = "bcrypt-4.1.2-cp37-abi3-win32.whl", hash = "sha256:fbe188b878313d01b7718390f31528be4010fed1faa798c5a1d0469c9c48c369"}, + {file = "bcrypt-4.1.2-cp37-abi3-win_amd64.whl", hash = "sha256:9800ae5bd5077b13725e2e3934aa3c9c37e49d3ea3d06318010aa40f54c63551"}, + {file = 
"bcrypt-4.1.2-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:71b8be82bc46cedd61a9f4ccb6c1a493211d031415a34adde3669ee1b0afbb63"}, + {file = "bcrypt-4.1.2-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68e3c6642077b0c8092580c819c1684161262b2e30c4f45deb000c38947bf483"}, + {file = "bcrypt-4.1.2-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:387e7e1af9a4dd636b9505a465032f2f5cb8e61ba1120e79a0e1cd0b512f3dfc"}, + {file = "bcrypt-4.1.2-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:f70d9c61f9c4ca7d57f3bfe88a5ccf62546ffbadf3681bb1e268d9d2e41c91a7"}, + {file = "bcrypt-4.1.2-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:2a298db2a8ab20056120b45e86c00a0a5eb50ec4075b6142db35f593b97cb3fb"}, + {file = "bcrypt-4.1.2-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:ba55e40de38a24e2d78d34c2d36d6e864f93e0d79d0b6ce915e4335aa81d01b1"}, + {file = "bcrypt-4.1.2-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:3566a88234e8de2ccae31968127b0ecccbb4cddb629da744165db72b58d88ca4"}, + {file = "bcrypt-4.1.2-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:b90e216dc36864ae7132cb151ffe95155a37a14e0de3a8f64b49655dd959ff9c"}, + {file = "bcrypt-4.1.2-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:69057b9fc5093ea1ab00dd24ede891f3e5e65bee040395fb1e66ee196f9c9b4a"}, + {file = "bcrypt-4.1.2-cp39-abi3-win32.whl", hash = "sha256:02d9ef8915f72dd6daaef40e0baeef8a017ce624369f09754baf32bb32dba25f"}, + {file = "bcrypt-4.1.2-cp39-abi3-win_amd64.whl", hash = "sha256:be3ab1071662f6065899fe08428e45c16aa36e28bc42921c4901a191fda6ee42"}, + {file = "bcrypt-4.1.2-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:d75fc8cd0ba23f97bae88a6ec04e9e5351ff3c6ad06f38fe32ba50cbd0d11946"}, + {file = "bcrypt-4.1.2-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:a97e07e83e3262599434816f631cc4c7ca2aa8e9c072c1b1a7fec2ae809a1d2d"}, + {file = "bcrypt-4.1.2-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = 
"sha256:e51c42750b7585cee7892c2614be0d14107fad9581d1738d954a262556dd1aab"}, + {file = "bcrypt-4.1.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:ba4e4cc26610581a6329b3937e02d319f5ad4b85b074846bf4fef8a8cf51e7bb"}, + {file = "bcrypt-4.1.2.tar.gz", hash = "sha256:33313a1200a3ae90b75587ceac502b048b840fc69e7f7a0905b5f87fac7a1258"}, ] [package.extras] @@ -191,7 +211,6 @@ typecheck = ["mypy"] name = "billiard" version = "4.2.0" description = "Python multiprocessing fork with improvements and bugfixes" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -201,30 +220,33 @@ files = [ [[package]] name = "black" -version = "23.11.0" +version = "23.12.1" description = "The uncompromising code formatter." -category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "black-23.11.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dbea0bb8575c6b6303cc65017b46351dc5953eea5c0a59d7b7e3a2d2f433a911"}, - {file = "black-23.11.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:412f56bab20ac85927f3a959230331de5614aecda1ede14b373083f62ec24e6f"}, - {file = "black-23.11.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d136ef5b418c81660ad847efe0e55c58c8208b77a57a28a503a5f345ccf01394"}, - {file = "black-23.11.0-cp310-cp310-win_amd64.whl", hash = "sha256:6c1cac07e64433f646a9a838cdc00c9768b3c362805afc3fce341af0e6a9ae9f"}, - {file = "black-23.11.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cf57719e581cfd48c4efe28543fea3d139c6b6f1238b3f0102a9c73992cbb479"}, - {file = "black-23.11.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:698c1e0d5c43354ec5d6f4d914d0d553a9ada56c85415700b81dc90125aac244"}, - {file = "black-23.11.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:760415ccc20f9e8747084169110ef75d545f3b0932ee21368f63ac0fee86b221"}, - {file = "black-23.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:58e5f4d08a205b11800332920e285bd25e1a75c54953e05502052738fe16b3b5"}, - 
{file = "black-23.11.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:45aa1d4675964946e53ab81aeec7a37613c1cb71647b5394779e6efb79d6d187"}, - {file = "black-23.11.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4c44b7211a3a0570cc097e81135faa5f261264f4dfaa22bd5ee2875a4e773bd6"}, - {file = "black-23.11.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a9acad1451632021ee0d146c8765782a0c3846e0e0ea46659d7c4f89d9b212b"}, - {file = "black-23.11.0-cp38-cp38-win_amd64.whl", hash = "sha256:fc7f6a44d52747e65a02558e1d807c82df1d66ffa80a601862040a43ec2e3142"}, - {file = "black-23.11.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7f622b6822f02bfaf2a5cd31fdb7cd86fcf33dab6ced5185c35f5db98260b055"}, - {file = "black-23.11.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:250d7e60f323fcfc8ea6c800d5eba12f7967400eb6c2d21ae85ad31c204fb1f4"}, - {file = "black-23.11.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5133f5507007ba08d8b7b263c7aa0f931af5ba88a29beacc4b2dc23fcefe9c06"}, - {file = "black-23.11.0-cp39-cp39-win_amd64.whl", hash = "sha256:421f3e44aa67138ab1b9bfbc22ee3780b22fa5b291e4db8ab7eee95200726b07"}, - {file = "black-23.11.0-py3-none-any.whl", hash = "sha256:54caaa703227c6e0c87b76326d0862184729a69b73d3b7305b6288e1d830067e"}, - {file = "black-23.11.0.tar.gz", hash = "sha256:4c68855825ff432d197229846f971bc4d6666ce90492e5b02013bcaca4d9ab05"}, + {file = "black-23.12.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0aaf6041986767a5e0ce663c7a2f0e9eaf21e6ff87a5f95cbf3675bfd4c41d2"}, + {file = "black-23.12.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c88b3711d12905b74206227109272673edce0cb29f27e1385f33b0163c414bba"}, + {file = "black-23.12.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a920b569dc6b3472513ba6ddea21f440d4b4c699494d2e972a1753cdc25df7b0"}, + {file = "black-23.12.1-cp310-cp310-win_amd64.whl", hash = 
"sha256:3fa4be75ef2a6b96ea8d92b1587dd8cb3a35c7e3d51f0738ced0781c3aa3a5a3"}, + {file = "black-23.12.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8d4df77958a622f9b5a4c96edb4b8c0034f8434032ab11077ec6c56ae9f384ba"}, + {file = "black-23.12.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:602cfb1196dc692424c70b6507593a2b29aac0547c1be9a1d1365f0d964c353b"}, + {file = "black-23.12.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c4352800f14be5b4864016882cdba10755bd50805c95f728011bcb47a4afd59"}, + {file = "black-23.12.1-cp311-cp311-win_amd64.whl", hash = "sha256:0808494f2b2df923ffc5723ed3c7b096bd76341f6213989759287611e9837d50"}, + {file = "black-23.12.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:25e57fd232a6d6ff3f4478a6fd0580838e47c93c83eaf1ccc92d4faf27112c4e"}, + {file = "black-23.12.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2d9e13db441c509a3763a7a3d9a49ccc1b4e974a47be4e08ade2a228876500ec"}, + {file = "black-23.12.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d1bd9c210f8b109b1762ec9fd36592fdd528485aadb3f5849b2740ef17e674e"}, + {file = "black-23.12.1-cp312-cp312-win_amd64.whl", hash = "sha256:ae76c22bde5cbb6bfd211ec343ded2163bba7883c7bc77f6b756a1049436fbb9"}, + {file = "black-23.12.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1fa88a0f74e50e4487477bc0bb900c6781dbddfdfa32691e780bf854c3b4a47f"}, + {file = "black-23.12.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a4d6a9668e45ad99d2f8ec70d5c8c04ef4f32f648ef39048d010b0689832ec6d"}, + {file = "black-23.12.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b18fb2ae6c4bb63eebe5be6bd869ba2f14fd0259bda7d18a46b764d8fb86298a"}, + {file = "black-23.12.1-cp38-cp38-win_amd64.whl", hash = "sha256:c04b6d9d20e9c13f43eee8ea87d44156b8505ca8a3c878773f68b4e4812a421e"}, + {file = "black-23.12.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3e1b38b3135fd4c025c28c55ddfc236b05af657828a8a6abe5deec419a0b7055"}, + 
{file = "black-23.12.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4f0031eaa7b921db76decd73636ef3a12c942ed367d8c3841a0739412b260a54"}, + {file = "black-23.12.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97e56155c6b737854e60a9ab1c598ff2533d57e7506d97af5481141671abf3ea"}, + {file = "black-23.12.1-cp39-cp39-win_amd64.whl", hash = "sha256:dd15245c8b68fe2b6bd0f32c1556509d11bb33aec9b5d0866dd8e2ed3dba09c2"}, + {file = "black-23.12.1-py3-none-any.whl", hash = "sha256:78baad24af0f033958cad29731e27363183e140962595def56423e626f4bee3e"}, + {file = "black-23.12.1.tar.gz", hash = "sha256:4ce3ef14ebe8d9509188014d96af1c456a910d5b5cbf434a09fef7e024b3d0d5"}, ] [package.dependencies] @@ -238,20 +260,19 @@ typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} [package.extras] colorama = ["colorama (>=0.4.3)"] -d = ["aiohttp (>=3.7.4)"] +d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"] jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] uvloop = ["uvloop (>=0.15.2)"] [[package]] name = "celery" -version = "5.3.5" +version = "5.3.6" description = "Distributed Task Queue." 
-category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "celery-5.3.5-py3-none-any.whl", hash = "sha256:30b75ac60fb081c2d9f8881382c148ed7c9052031a75a1e8743ff4b4b071f184"}, - {file = "celery-5.3.5.tar.gz", hash = "sha256:6b65d8dd5db499dd6190c45aa6398e171b99592f2af62c312f7391587feb5458"}, + {file = "celery-5.3.6-py3-none-any.whl", hash = "sha256:9da4ea0118d232ce97dff5ed4974587fb1c0ff5c10042eb15278487cdd27d1af"}, + {file = "celery-5.3.6.tar.gz", hash = "sha256:870cc71d737c0200c397290d730344cc991d13a057534353d124c9380267aab9"}, ] [package.dependencies] @@ -261,7 +282,7 @@ click = ">=8.1.2,<9.0" click-didyoumean = ">=0.3.0" click-plugins = ">=1.1.1" click-repl = ">=0.2.0" -kombu = ">=5.3.3,<6.0" +kombu = ">=5.3.4,<6.0" python-dateutil = ">=2.8.2" tzdata = ">=2022.7" vine = ">=5.1.0,<6.0" @@ -278,7 +299,7 @@ couchbase = ["couchbase (>=3.0.0)"] couchdb = ["pycouchdb (==1.14.2)"] django = ["Django (>=2.2.28)"] dynamodb = ["boto3 (>=1.26.143)"] -elasticsearch = ["elastic-transport (<=8.10.0)", "elasticsearch (<=8.10.1)"] +elasticsearch = ["elastic-transport (<=8.10.0)", "elasticsearch (<=8.11.0)"] eventlet = ["eventlet (>=0.32.0)"] gevent = ["gevent (>=1.5.0)"] librabbitmq = ["librabbitmq (>=2.0.0)"] @@ -303,7 +324,6 @@ zstd = ["zstandard (==0.22.0)"] name = "certifi" version = "2023.11.17" description = "Python package for providing Mozilla's CA Bundle." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -315,7 +335,6 @@ files = [ name = "cffi" version = "1.16.0" description = "Foreign Function Interface for Python calling C code." -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -380,7 +399,6 @@ pycparser = "*" name = "charset-normalizer" version = "3.3.2" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
-category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -480,7 +498,6 @@ files = [ name = "click" version = "8.1.7" description = "Composable command line interface toolkit" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -495,7 +512,6 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""} name = "click-didyoumean" version = "0.3.0" description = "Enables git-like *did-you-mean* feature in click" -category = "main" optional = false python-versions = ">=3.6.2,<4.0.0" files = [ @@ -510,7 +526,6 @@ click = ">=7" name = "click-plugins" version = "1.1.1" description = "An extension module for click to enable registering CLI commands via setuptools entry-points." -category = "main" optional = false python-versions = "*" files = [ @@ -528,7 +543,6 @@ dev = ["coveralls", "pytest (>=3.6)", "pytest-cov", "wheel"] name = "click-repl" version = "0.3.0" description = "REPL plugin for Click" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -547,7 +561,6 @@ testing = ["pytest (>=7.2.1)", "pytest-cov (>=4.0.0)", "tox (>=4.4.3)"] name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." 
-category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ @@ -557,64 +570,63 @@ files = [ [[package]] name = "coverage" -version = "7.3.2" +version = "7.4.0" description = "Code coverage measurement for Python" -category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "coverage-7.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d872145f3a3231a5f20fd48500274d7df222e291d90baa2026cc5152b7ce86bf"}, - {file = "coverage-7.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:310b3bb9c91ea66d59c53fa4989f57d2436e08f18fb2f421a1b0b6b8cc7fffda"}, - {file = "coverage-7.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f47d39359e2c3779c5331fc740cf4bce6d9d680a7b4b4ead97056a0ae07cb49a"}, - {file = "coverage-7.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aa72dbaf2c2068404b9870d93436e6d23addd8bbe9295f49cbca83f6e278179c"}, - {file = "coverage-7.3.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:beaa5c1b4777f03fc63dfd2a6bd820f73f036bfb10e925fce067b00a340d0f3f"}, - {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:dbc1b46b92186cc8074fee9d9fbb97a9dd06c6cbbef391c2f59d80eabdf0faa6"}, - {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:315a989e861031334d7bee1f9113c8770472db2ac484e5b8c3173428360a9148"}, - {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d1bc430677773397f64a5c88cb522ea43175ff16f8bfcc89d467d974cb2274f9"}, - {file = "coverage-7.3.2-cp310-cp310-win32.whl", hash = "sha256:a889ae02f43aa45032afe364c8ae84ad3c54828c2faa44f3bfcafecb5c96b02f"}, - {file = "coverage-7.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:c0ba320de3fb8c6ec16e0be17ee1d3d69adcda99406c43c0409cb5c41788a611"}, - {file = 
"coverage-7.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ac8c802fa29843a72d32ec56d0ca792ad15a302b28ca6203389afe21f8fa062c"}, - {file = "coverage-7.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:89a937174104339e3a3ffcf9f446c00e3a806c28b1841c63edb2b369310fd074"}, - {file = "coverage-7.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e267e9e2b574a176ddb983399dec325a80dbe161f1a32715c780b5d14b5f583a"}, - {file = "coverage-7.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2443cbda35df0d35dcfb9bf8f3c02c57c1d6111169e3c85fc1fcc05e0c9f39a3"}, - {file = "coverage-7.3.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4175e10cc8dda0265653e8714b3174430b07c1dca8957f4966cbd6c2b1b8065a"}, - {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0cbf38419fb1a347aaf63481c00f0bdc86889d9fbf3f25109cf96c26b403fda1"}, - {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:5c913b556a116b8d5f6ef834038ba983834d887d82187c8f73dec21049abd65c"}, - {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1981f785239e4e39e6444c63a98da3a1db8e971cb9ceb50a945ba6296b43f312"}, - {file = "coverage-7.3.2-cp311-cp311-win32.whl", hash = "sha256:43668cabd5ca8258f5954f27a3aaf78757e6acf13c17604d89648ecc0cc66640"}, - {file = "coverage-7.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10c39c0452bf6e694511c901426d6b5ac005acc0f78ff265dbe36bf81f808a2"}, - {file = "coverage-7.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:4cbae1051ab791debecc4a5dcc4a1ff45fc27b91b9aee165c8a27514dd160836"}, - {file = "coverage-7.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:12d15ab5833a997716d76f2ac1e4b4d536814fc213c85ca72756c19e5a6b3d63"}, - {file = "coverage-7.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:3c7bba973ebee5e56fe9251300c00f1579652587a9f4a5ed8404b15a0471f216"}, - {file = "coverage-7.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fe494faa90ce6381770746077243231e0b83ff3f17069d748f645617cefe19d4"}, - {file = "coverage-7.3.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6e9589bd04d0461a417562649522575d8752904d35c12907d8c9dfeba588faf"}, - {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d51ac2a26f71da1b57f2dc81d0e108b6ab177e7d30e774db90675467c847bbdf"}, - {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:99b89d9f76070237975b315b3d5f4d6956ae354a4c92ac2388a5695516e47c84"}, - {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fa28e909776dc69efb6ed975a63691bc8172b64ff357e663a1bb06ff3c9b589a"}, - {file = "coverage-7.3.2-cp312-cp312-win32.whl", hash = "sha256:289fe43bf45a575e3ab10b26d7b6f2ddb9ee2dba447499f5401cfb5ecb8196bb"}, - {file = "coverage-7.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:7dbc3ed60e8659bc59b6b304b43ff9c3ed858da2839c78b804973f613d3e92ed"}, - {file = "coverage-7.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f94b734214ea6a36fe16e96a70d941af80ff3bfd716c141300d95ebc85339738"}, - {file = "coverage-7.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:af3d828d2c1cbae52d34bdbb22fcd94d1ce715d95f1a012354a75e5913f1bda2"}, - {file = "coverage-7.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:630b13e3036e13c7adc480ca42fa7afc2a5d938081d28e20903cf7fd687872e2"}, - {file = "coverage-7.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c9eacf273e885b02a0273bb3a2170f30e2d53a6d53b72dbe02d6701b5296101c"}, - {file = "coverage-7.3.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:d8f17966e861ff97305e0801134e69db33b143bbfb36436efb9cfff6ec7b2fd9"}, - {file = "coverage-7.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b4275802d16882cf9c8b3d057a0839acb07ee9379fa2749eca54efbce1535b82"}, - {file = "coverage-7.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:72c0cfa5250f483181e677ebc97133ea1ab3eb68645e494775deb6a7f6f83901"}, - {file = "coverage-7.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cb536f0dcd14149425996821a168f6e269d7dcd2c273a8bff8201e79f5104e76"}, - {file = "coverage-7.3.2-cp38-cp38-win32.whl", hash = "sha256:307adb8bd3abe389a471e649038a71b4eb13bfd6b7dd9a129fa856f5c695cf92"}, - {file = "coverage-7.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:88ed2c30a49ea81ea3b7f172e0269c182a44c236eb394718f976239892c0a27a"}, - {file = "coverage-7.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b631c92dfe601adf8f5ebc7fc13ced6bb6e9609b19d9a8cd59fa47c4186ad1ce"}, - {file = "coverage-7.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d3d9df4051c4a7d13036524b66ecf7a7537d14c18a384043f30a303b146164e9"}, - {file = "coverage-7.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f7363d3b6a1119ef05015959ca24a9afc0ea8a02c687fe7e2d557705375c01f"}, - {file = "coverage-7.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2f11cc3c967a09d3695d2a6f03fb3e6236622b93be7a4b5dc09166a861be6d25"}, - {file = "coverage-7.3.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:149de1d2401ae4655c436a3dced6dd153f4c3309f599c3d4bd97ab172eaf02d9"}, - {file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3a4006916aa6fee7cd38db3bfc95aa9c54ebb4ffbfc47c677c8bba949ceba0a6"}, - {file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9028a3871280110d6e1aa2df1afd5ef003bab5fb1ef421d6dc748ae1c8ef2ebc"}, - {file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", 
hash = "sha256:9f805d62aec8eb92bab5b61c0f07329275b6f41c97d80e847b03eb894f38d083"}, - {file = "coverage-7.3.2-cp39-cp39-win32.whl", hash = "sha256:d1c88ec1a7ff4ebca0219f5b1ef863451d828cccf889c173e1253aa84b1e07ce"}, - {file = "coverage-7.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b4767da59464bb593c07afceaddea61b154136300881844768037fd5e859353f"}, - {file = "coverage-7.3.2-pp38.pp39.pp310-none-any.whl", hash = "sha256:ae97af89f0fbf373400970c0a21eef5aa941ffeed90aee43650b81f7d7f47637"}, - {file = "coverage-7.3.2.tar.gz", hash = "sha256:be32ad29341b0170e795ca590e1c07e81fc061cb5b10c74ce7203491484404ef"}, + {file = "coverage-7.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36b0ea8ab20d6a7564e89cb6135920bc9188fb5f1f7152e94e8300b7b189441a"}, + {file = "coverage-7.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0676cd0ba581e514b7f726495ea75aba3eb20899d824636c6f59b0ed2f88c471"}, + {file = "coverage-7.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0ca5c71a5a1765a0f8f88022c52b6b8be740e512980362f7fdbb03725a0d6b9"}, + {file = "coverage-7.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7c97726520f784239f6c62506bc70e48d01ae71e9da128259d61ca5e9788516"}, + {file = "coverage-7.4.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:815ac2d0f3398a14286dc2cea223a6f338109f9ecf39a71160cd1628786bc6f5"}, + {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:80b5ee39b7f0131ebec7968baa9b2309eddb35b8403d1869e08f024efd883566"}, + {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5b2ccb7548a0b65974860a78c9ffe1173cfb5877460e5a229238d985565574ae"}, + {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:995ea5c48c4ebfd898eacb098164b3cc826ba273b3049e4a889658548e321b43"}, + {file = "coverage-7.4.0-cp310-cp310-win32.whl", hash = 
"sha256:79287fd95585ed36e83182794a57a46aeae0b64ca53929d1176db56aacc83451"}, + {file = "coverage-7.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:5b14b4f8760006bfdb6e08667af7bc2d8d9bfdb648351915315ea17645347137"}, + {file = "coverage-7.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:04387a4a6ecb330c1878907ce0dc04078ea72a869263e53c72a1ba5bbdf380ca"}, + {file = "coverage-7.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ea81d8f9691bb53f4fb4db603203029643caffc82bf998ab5b59ca05560f4c06"}, + {file = "coverage-7.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74775198b702868ec2d058cb92720a3c5a9177296f75bd97317c787daf711505"}, + {file = "coverage-7.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76f03940f9973bfaee8cfba70ac991825611b9aac047e5c80d499a44079ec0bc"}, + {file = "coverage-7.4.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:485e9f897cf4856a65a57c7f6ea3dc0d4e6c076c87311d4bc003f82cfe199d25"}, + {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6ae8c9d301207e6856865867d762a4b6fd379c714fcc0607a84b92ee63feff70"}, + {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bf477c355274a72435ceb140dc42de0dc1e1e0bf6e97195be30487d8eaaf1a09"}, + {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:83c2dda2666fe32332f8e87481eed056c8b4d163fe18ecc690b02802d36a4d26"}, + {file = "coverage-7.4.0-cp311-cp311-win32.whl", hash = "sha256:697d1317e5290a313ef0d369650cfee1a114abb6021fa239ca12b4849ebbd614"}, + {file = "coverage-7.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:26776ff6c711d9d835557ee453082025d871e30b3fd6c27fcef14733f67f0590"}, + {file = "coverage-7.4.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:13eaf476ec3e883fe3e5fe3707caeb88268a06284484a3daf8250259ef1ba143"}, + {file = 
"coverage-7.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846f52f46e212affb5bcf131c952fb4075b55aae6b61adc9856222df89cbe3e2"}, + {file = "coverage-7.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26f66da8695719ccf90e794ed567a1549bb2644a706b41e9f6eae6816b398c4a"}, + {file = "coverage-7.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:164fdcc3246c69a6526a59b744b62e303039a81e42cfbbdc171c91a8cc2f9446"}, + {file = "coverage-7.4.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:316543f71025a6565677d84bc4df2114e9b6a615aa39fb165d697dba06a54af9"}, + {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bb1de682da0b824411e00a0d4da5a784ec6496b6850fdf8c865c1d68c0e318dd"}, + {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:0e8d06778e8fbffccfe96331a3946237f87b1e1d359d7fbe8b06b96c95a5407a"}, + {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a56de34db7b7ff77056a37aedded01b2b98b508227d2d0979d373a9b5d353daa"}, + {file = "coverage-7.4.0-cp312-cp312-win32.whl", hash = "sha256:51456e6fa099a8d9d91497202d9563a320513fcf59f33991b0661a4a6f2ad450"}, + {file = "coverage-7.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:cd3c1e4cb2ff0083758f09be0f77402e1bdf704adb7f89108007300a6da587d0"}, + {file = "coverage-7.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e9d1bf53c4c8de58d22e0e956a79a5b37f754ed1ffdbf1a260d9dcfa2d8a325e"}, + {file = "coverage-7.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:109f5985182b6b81fe33323ab4707011875198c41964f014579cf82cebf2bb85"}, + {file = "coverage-7.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cc9d4bc55de8003663ec94c2f215d12d42ceea128da8f0f4036235a119c88ac"}, + {file = 
"coverage-7.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cc6d65b21c219ec2072c1293c505cf36e4e913a3f936d80028993dd73c7906b1"}, + {file = "coverage-7.4.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a10a4920def78bbfff4eff8a05c51be03e42f1c3735be42d851f199144897ba"}, + {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b8e99f06160602bc64da35158bb76c73522a4010f0649be44a4e167ff8555952"}, + {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:7d360587e64d006402b7116623cebf9d48893329ef035278969fa3bbf75b697e"}, + {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:29f3abe810930311c0b5d1a7140f6395369c3db1be68345638c33eec07535105"}, + {file = "coverage-7.4.0-cp38-cp38-win32.whl", hash = "sha256:5040148f4ec43644702e7b16ca864c5314ccb8ee0751ef617d49aa0e2d6bf4f2"}, + {file = "coverage-7.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:9864463c1c2f9cb3b5db2cf1ff475eed2f0b4285c2aaf4d357b69959941aa555"}, + {file = "coverage-7.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:936d38794044b26c99d3dd004d8af0035ac535b92090f7f2bb5aa9c8e2f5cd42"}, + {file = "coverage-7.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:799c8f873794a08cdf216aa5d0531c6a3747793b70c53f70e98259720a6fe2d7"}, + {file = "coverage-7.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e7defbb9737274023e2d7af02cac77043c86ce88a907c58f42b580a97d5bcca9"}, + {file = "coverage-7.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a1526d265743fb49363974b7aa8d5899ff64ee07df47dd8d3e37dcc0818f09ed"}, + {file = "coverage-7.4.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf635a52fc1ea401baf88843ae8708591aa4adff875e5c23220de43b1ccf575c"}, + {file = 
"coverage-7.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:756ded44f47f330666843b5781be126ab57bb57c22adbb07d83f6b519783b870"}, + {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:0eb3c2f32dabe3a4aaf6441dde94f35687224dfd7eb2a7f47f3fd9428e421058"}, + {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bfd5db349d15c08311702611f3dccbef4b4e2ec148fcc636cf8739519b4a5c0f"}, + {file = "coverage-7.4.0-cp39-cp39-win32.whl", hash = "sha256:53d7d9158ee03956e0eadac38dfa1ec8068431ef8058fe6447043db1fb40d932"}, + {file = "coverage-7.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:cfd2a8b6b0d8e66e944d47cdec2f47c48fef2ba2f2dff5a9a75757f64172857e"}, + {file = "coverage-7.4.0-pp38.pp39.pp310-none-any.whl", hash = "sha256:c530833afc4707fe48524a44844493f36d8727f04dcce91fb978c414a8556cc6"}, + {file = "coverage-7.4.0.tar.gz", hash = "sha256:707c0f58cb1712b8809ece32b68996ee1e609f71bd14615bd8f87a1293cb610e"}, ] [package.extras] @@ -624,7 +636,6 @@ toml = ["tomli"] name = "cron-descriptor" version = "1.4.0" description = "A Python library that converts cron expressions into human readable strings." -category = "main" optional = false python-versions = "*" files = [ @@ -636,35 +647,34 @@ dev = ["polib"] [[package]] name = "cryptography" -version = "41.0.5" +version = "41.0.7" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
-category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "cryptography-41.0.5-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:da6a0ff8f1016ccc7477e6339e1d50ce5f59b88905585f77193ebd5068f1e797"}, - {file = "cryptography-41.0.5-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:b948e09fe5fb18517d99994184854ebd50b57248736fd4c720ad540560174ec5"}, - {file = "cryptography-41.0.5-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d38e6031e113b7421db1de0c1b1f7739564a88f1684c6b89234fbf6c11b75147"}, - {file = "cryptography-41.0.5-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e270c04f4d9b5671ebcc792b3ba5d4488bf7c42c3c241a3748e2599776f29696"}, - {file = "cryptography-41.0.5-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ec3b055ff8f1dce8e6ef28f626e0972981475173d7973d63f271b29c8a2897da"}, - {file = "cryptography-41.0.5-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:7d208c21e47940369accfc9e85f0de7693d9a5d843c2509b3846b2db170dfd20"}, - {file = "cryptography-41.0.5-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:8254962e6ba1f4d2090c44daf50a547cd5f0bf446dc658a8e5f8156cae0d8548"}, - {file = "cryptography-41.0.5-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:a48e74dad1fb349f3dc1d449ed88e0017d792997a7ad2ec9587ed17405667e6d"}, - {file = "cryptography-41.0.5-cp37-abi3-win32.whl", hash = "sha256:d3977f0e276f6f5bf245c403156673db103283266601405376f075c849a0b936"}, - {file = "cryptography-41.0.5-cp37-abi3-win_amd64.whl", hash = "sha256:73801ac9736741f220e20435f84ecec75ed70eda90f781a148f1bad546963d81"}, - {file = "cryptography-41.0.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3be3ca726e1572517d2bef99a818378bbcf7d7799d5372a46c79c29eb8d166c1"}, - {file = "cryptography-41.0.5-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e886098619d3815e0ad5790c973afeee2c0e6e04b4da90b88e6bd06e2a0b1b72"}, - {file = 
"cryptography-41.0.5-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:573eb7128cbca75f9157dcde974781209463ce56b5804983e11a1c462f0f4e88"}, - {file = "cryptography-41.0.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:0c327cac00f082013c7c9fb6c46b7cc9fa3c288ca702c74773968173bda421bf"}, - {file = "cryptography-41.0.5-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:227ec057cd32a41c6651701abc0328135e472ed450f47c2766f23267b792a88e"}, - {file = "cryptography-41.0.5-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:22892cc830d8b2c89ea60148227631bb96a7da0c1b722f2aac8824b1b7c0b6b8"}, - {file = "cryptography-41.0.5-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:5a70187954ba7292c7876734183e810b728b4f3965fbe571421cb2434d279179"}, - {file = "cryptography-41.0.5-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:88417bff20162f635f24f849ab182b092697922088b477a7abd6664ddd82291d"}, - {file = "cryptography-41.0.5-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c707f7afd813478e2019ae32a7c49cd932dd60ab2d2a93e796f68236b7e1fbf1"}, - {file = "cryptography-41.0.5-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:580afc7b7216deeb87a098ef0674d6ee34ab55993140838b14c9b83312b37b86"}, - {file = "cryptography-41.0.5-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:fba1e91467c65fe64a82c689dc6cf58151158993b13eb7a7f3f4b7f395636723"}, - {file = "cryptography-41.0.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:0d2a6a598847c46e3e321a7aef8af1436f11c27f1254933746304ff014664d84"}, - {file = "cryptography-41.0.5.tar.gz", hash = "sha256:392cb88b597247177172e02da6b7a63deeff1937fa6fec3bbf902ebd75d97ec7"}, + {file = "cryptography-41.0.7-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:3c78451b78313fa81607fa1b3f1ae0a5ddd8014c38a02d9db0616133987b9cdf"}, + {file = "cryptography-41.0.7-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:928258ba5d6f8ae644e764d0f996d61a8777559f72dfeb2eea7e2fe0ad6e782d"}, + {file = 
"cryptography-41.0.7-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a1b41bc97f1ad230a41657d9155113c7521953869ae57ac39ac7f1bb471469a"}, + {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:841df4caa01008bad253bce2a6f7b47f86dc9f08df4b433c404def869f590a15"}, + {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5429ec739a29df2e29e15d082f1d9ad683701f0ec7709ca479b3ff2708dae65a"}, + {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:43f2552a2378b44869fe8827aa19e69512e3245a219104438692385b0ee119d1"}, + {file = "cryptography-41.0.7-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:af03b32695b24d85a75d40e1ba39ffe7db7ffcb099fe507b39fd41a565f1b157"}, + {file = "cryptography-41.0.7-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:49f0805fc0b2ac8d4882dd52f4a3b935b210935d500b6b805f321addc8177406"}, + {file = "cryptography-41.0.7-cp37-abi3-win32.whl", hash = "sha256:f983596065a18a2183e7f79ab3fd4c475205b839e02cbc0efbbf9666c4b3083d"}, + {file = "cryptography-41.0.7-cp37-abi3-win_amd64.whl", hash = "sha256:90452ba79b8788fa380dfb587cca692976ef4e757b194b093d845e8d99f612f2"}, + {file = "cryptography-41.0.7-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:079b85658ea2f59c4f43b70f8119a52414cdb7be34da5d019a77bf96d473b960"}, + {file = "cryptography-41.0.7-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:b640981bf64a3e978a56167594a0e97db71c89a479da8e175d8bb5be5178c003"}, + {file = "cryptography-41.0.7-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e3114da6d7f95d2dee7d3f4eec16dacff819740bbab931aff8648cb13c5ff5e7"}, + {file = "cryptography-41.0.7-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d5ec85080cce7b0513cfd233914eb8b7bbd0633f1d1703aa28d1dd5a72f678ec"}, + {file = "cryptography-41.0.7-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = 
"sha256:7a698cb1dac82c35fcf8fe3417a3aaba97de16a01ac914b89a0889d364d2f6be"}, + {file = "cryptography-41.0.7-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:37a138589b12069efb424220bf78eac59ca68b95696fc622b6ccc1c0a197204a"}, + {file = "cryptography-41.0.7-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:68a2dec79deebc5d26d617bfdf6e8aab065a4f34934b22d3b5010df3ba36612c"}, + {file = "cryptography-41.0.7-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:09616eeaef406f99046553b8a40fbf8b1e70795a91885ba4c96a70793de5504a"}, + {file = "cryptography-41.0.7-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:48a0476626da912a44cc078f9893f292f0b3e4c739caf289268168d8f4702a39"}, + {file = "cryptography-41.0.7-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c7f3201ec47d5207841402594f1d7950879ef890c0c495052fa62f58283fde1a"}, + {file = "cryptography-41.0.7-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c5ca78485a255e03c32b513f8c2bc39fedb7f5c5f8535545bdc223a03b24f248"}, + {file = "cryptography-41.0.7-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:d6c391c021ab1f7a82da5d8d0b3cee2f4b2c455ec86c8aebbc84837a631ff309"}, + {file = "cryptography-41.0.7.tar.gz", hash = "sha256:13f93ce9bea8016c253b34afc6bd6a75993e5c40672ed5405a9c832f0d4a00bc"}, ] [package.dependencies] @@ -684,7 +694,6 @@ test-randomorder = ["pytest-randomly"] name = "defusedxml" version = "0.7.1" description = "XML bomb protection for Python stdlib modules" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -692,11 +701,31 @@ files = [ {file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"}, ] +[[package]] +name = "diffsync" +version = "1.10.0" +description = "Library to easily sync/diff/update 2 different data sources" +optional = false +python-versions = ">=3.8,<4.0" +files = [ + {file = "diffsync-1.10.0-py3-none-any.whl", hash = 
"sha256:f4368c97162d51eecc7a8e87026c731197a694026cabcf2ab4f16d18d7bdadbd"}, + {file = "diffsync-1.10.0.tar.gz", hash = "sha256:a9d7cb8e8ce983b446bf858c1c5c82cf473fcf231db73c0855e8c59ee7cd8370"}, +] + +[package.dependencies] +colorama = ">=0.4.3,<0.5.0" +packaging = ">=21.3,<24.0" +pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0" +structlog = ">=20.1.0,<23.0.0" +typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} + +[package.extras] +redis = ["redis (>=4.3,<5.0)"] + [[package]] name = "dill" version = "0.3.7" description = "serialize all of Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -711,7 +740,6 @@ graph = ["objgraph (>=1.7.2)"] name = "django" version = "3.2.23" description = "A high-level Python Web framework that encourages rapid development and clean, pragmatic design." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -732,7 +760,6 @@ bcrypt = ["bcrypt"] name = "django-ajax-tables" version = "1.1.1" description = "Django tag for ajax-enabled tables" -category = "main" optional = false python-versions = "*" files = [ @@ -744,7 +771,6 @@ files = [ name = "django-celery-beat" version = "2.5.0" description = "Database-backed Periodic Tasks." -category = "main" optional = false python-versions = "*" files = [ @@ -765,7 +791,6 @@ tzdata = "*" name = "django-celery-results" version = "2.4.0" description = "Celery result backends for Django." -category = "main" optional = false python-versions = "*" files = [ @@ -780,7 +805,6 @@ celery = ">=5.2.3,<6.0" name = "django-constance" version = "2.9.1" description = "Django live settings with pluggable backends, including Redis." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -799,7 +823,6 @@ redis = ["redis"] name = "django-cors-headers" version = "4.2.0" description = "django-cors-headers is a Django application for handling the server headers required for Cross-Origin Resource Sharing (CORS)." 
-category = "main" optional = false python-versions = ">=3.8" files = [ @@ -814,7 +837,6 @@ Django = ">=3.2" name = "django-db-file-storage" version = "0.5.5" description = "Custom FILE_STORAGE for Django. Saves files in your database instead of your file system." -category = "main" optional = false python-versions = "*" files = [ @@ -828,7 +850,6 @@ Django = "*" name = "django-debug-toolbar" version = "4.2.0" description = "A configurable set of panels that display various debug information about the current request/response." -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -844,7 +865,6 @@ sqlparse = ">=0.2" name = "django-extensions" version = "3.2.3" description = "Extensions for Django" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -859,7 +879,6 @@ Django = ">=3.2" name = "django-filter" version = "23.1" description = "Django-filter is a reusable Django application for allowing users to filter querysets dynamically." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -874,7 +893,6 @@ Django = ">=3.2" name = "django-health-check" version = "3.17.0" description = "Run checks on services like databases, queue servers, celery processes, etc." -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -893,7 +911,6 @@ test = ["celery", "pytest", "pytest-cov", "pytest-django", "redis"] name = "django-jinja" version = "2.10.2" description = "Jinja2 templating language integrated in Django." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -909,7 +926,6 @@ jinja2 = ">=3" name = "django-picklefield" version = "3.1" description = "Pickled object field for Django" -category = "main" optional = false python-versions = ">=3" files = [ @@ -927,7 +943,6 @@ tests = ["tox"] name = "django-prometheus" version = "2.3.1" description = "Django middlewares to monitor your application with Prometheus.io." 
-category = "main" optional = false python-versions = "*" files = [ @@ -942,7 +957,6 @@ prometheus-client = ">=0.7" name = "django-redis" version = "5.3.0" description = "Full featured redis cache backend for Django." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -961,7 +975,6 @@ hiredis = ["redis[hiredis] (>=3,!=4.0.0,!=4.0.1)"] name = "django-tables2" version = "2.6.0" description = "Table/data-grid framework for Django" -category = "main" optional = false python-versions = "*" files = [ @@ -979,7 +992,6 @@ tablib = ["tablib"] name = "django-taggit" version = "4.0.0" description = "django-taggit is a reusable Django application for simple tagging." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -994,7 +1006,6 @@ Django = ">=3.2" name = "django-timezone-field" version = "5.1" description = "A Django app providing DB, form, and REST framework fields for zoneinfo and pytz timezone objects." -category = "main" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -1004,19 +1015,18 @@ files = [ [package.dependencies] "backports.zoneinfo" = {version = ">=0.2.1,<0.3.0", markers = "python_version < \"3.9\""} -Django = ">=2.2,<3.0.0 || >=3.2.0,<5.0" +Django = ">=2.2,<3.0.dev0 || >=3.2.dev0,<5.0" pytz = "*" [[package]] name = "django-tree-queries" -version = "0.15.0" +version = "0.16.1" description = "Tree queries with explicit opt-in, without configurability" -category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "django_tree_queries-0.15.0-py3-none-any.whl", hash = "sha256:cf11340de59d3122919fde46e99966bad40ff942df768d683383b111554134a1"}, - {file = "django_tree_queries-0.15.0.tar.gz", hash = "sha256:0e994c2a4601c021a115a397ec8d0ff7d5e614fae95947f72126e6a419c60f08"}, + {file = "django_tree_queries-0.16.1-py3-none-any.whl", hash = "sha256:b57cebd85136897dc2d7d1da50f3944b13d4713009af655ae221c8202146c2f5"}, + {file = "django_tree_queries-0.16.1.tar.gz", 
hash = "sha256:5a7765bdbc78742ae7b206348aa674a7e39ef38069ac3854a51b330d25081c43"}, ] [package.extras] @@ -1026,7 +1036,6 @@ tests = ["coverage"] name = "django-webserver" version = "1.2.0" description = "Django management commands for production webservers" -category = "main" optional = false python-versions = "*" files = [ @@ -1048,7 +1057,6 @@ waitress = ["waitress"] name = "djangorestframework" version = "3.14.0" description = "Web APIs for Django, made easy." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1064,7 +1072,6 @@ pytz = "*" name = "drf-react-template-framework" version = "0.0.17" description = "Django REST Framework plugin that creates form schemas for react-jsonschema-form" -category = "main" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -1079,7 +1086,6 @@ djangorestframework = ">=3.12.0,<4.0.0" name = "drf-spectacular" version = "0.26.3" description = "Sane and flexible OpenAPI 3 schema generation for Django REST framework" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1102,14 +1108,13 @@ sidecar = ["drf-spectacular-sidecar"] [[package]] name = "drf-spectacular-sidecar" -version = "2023.10.1" +version = "2024.1.1" description = "Serve self-contained distribution builds of Swagger UI and Redoc with Django" -category = "main" optional = false python-versions = ">=3.6" files = [ - {file = "drf-spectacular-sidecar-2023.10.1.tar.gz", hash = "sha256:546a83c173589715e530fad211af60cbcda2db54eb9e0935d44251639332af6d"}, - {file = "drf_spectacular_sidecar-2023.10.1-py3-none-any.whl", hash = "sha256:3d042a6772512f4d238f0385d3430acf5f669f595fd0be2641fe6bbfb4c7b376"}, + {file = "drf-spectacular-sidecar-2024.1.1.tar.gz", hash = "sha256:099ec58b6af6a90e851a9329b12a57aa1ee7daa6cef62fb504f2ed302f10da76"}, + {file = "drf_spectacular_sidecar-2024.1.1-py3-none-any.whl", hash = "sha256:4b9e33b4dcfa43f84e3db2659d31766a018a2b98b02d8856d9cd69580a4911c9"}, ] [package.dependencies] @@ -1119,7 +1124,6 @@ 
Django = ">=2.2" name = "emoji" version = "2.8.0" description = "Emoji for Python" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -1130,11 +1134,24 @@ files = [ [package.extras] dev = ["coverage", "coveralls", "pytest"] +[[package]] +name = "exceptiongroup" +version = "1.2.0" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, + {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, +] + +[package.extras] +test = ["pytest (>=6)"] + [[package]] name = "flake8" version = "5.0.4" description = "the modular source code checker: pep8 pyflakes and co" -category = "dev" optional = false python-versions = ">=3.6.1" files = [ @@ -1151,7 +1168,6 @@ pyflakes = ">=2.5.0,<2.6.0" name = "future" version = "0.18.3" description = "Clean single-source support for Python 3 and 2" -category = "main" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -1162,7 +1178,6 @@ files = [ name = "ghp-import" version = "2.1.0" description = "Copy your docs directly to the gh-pages branch." 
-category = "dev" optional = false python-versions = "*" files = [ @@ -1180,7 +1195,6 @@ dev = ["flake8", "markdown", "twine", "wheel"] name = "gitdb" version = "4.0.11" description = "Git Object Database" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1193,27 +1207,25 @@ smmap = ">=3.0.1,<6" [[package]] name = "gitpython" -version = "3.1.40" +version = "3.1.41" description = "GitPython is a Python library used to interact with Git repositories" -category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "GitPython-3.1.40-py3-none-any.whl", hash = "sha256:cf14627d5a8049ffbf49915732e5eddbe8134c3bdb9d476e6182b676fc573f8a"}, - {file = "GitPython-3.1.40.tar.gz", hash = "sha256:22b126e9ffb671fdd0c129796343a02bf67bf2994b35449ffc9321aa755e18a4"}, + {file = "GitPython-3.1.41-py3-none-any.whl", hash = "sha256:c36b6634d069b3f719610175020a9aed919421c87552185b085e04fbbdb10b7c"}, + {file = "GitPython-3.1.41.tar.gz", hash = "sha256:ed66e624884f76df22c8e16066d567aaa5a37d5b5fa19db2c6df6f7156db9048"}, ] [package.dependencies] gitdb = ">=4.0.1,<5" [package.extras] -test = ["black", "coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mock", "mypy", "pre-commit", "pytest", "pytest-cov", "pytest-instafail", "pytest-subtests", "pytest-sugar"] +test = ["black", "coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mock", "mypy", "pre-commit", "pytest (>=7.3.1)", "pytest-cov", "pytest-instafail", "pytest-mock", "pytest-sugar", "sumtypes"] [[package]] name = "graphene" version = "2.1.9" description = "GraphQL Framework for Python" -category = "main" optional = false python-versions = "*" files = [ @@ -1236,7 +1248,6 @@ test = ["coveralls", "fastdiff (==0.2.0)", "iso8601", "mock", "promise", "pytest name = "graphene-django" version = "2.16.0" description = "Graphene Django integration" -category = "main" optional = false python-versions = "*" files = [ @@ -1261,7 +1272,6 @@ test = ["coveralls", "django-filter (>=2)", "djangorestframework (>=3.6.3)", "mo name 
= "graphene-django-optimizer" version = "0.8.0" description = "Optimize database access inside graphene queries." -category = "main" optional = false python-versions = "*" files = [ @@ -1272,7 +1282,6 @@ files = [ name = "graphql-core" version = "2.3.2" description = "GraphQL implementation for Python" -category = "main" optional = false python-versions = "*" files = [ @@ -1293,7 +1302,6 @@ test = ["coveralls (==1.11.1)", "cython (==0.29.17)", "gevent (==1.5.0)", "pyann name = "graphql-relay" version = "2.0.1" description = "Relay implementation for Python" -category = "main" optional = false python-versions = "*" files = [ @@ -1308,56 +1316,107 @@ six = ">=1.12" [[package]] name = "griffe" -version = "0.38.0" +version = "0.39.1" description = "Signatures for entire Python programs. Extract the structure, the frame, the skeleton of your project, to generate API documentation or find breaking changes in your API." -category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "griffe-0.38.0-py3-none-any.whl", hash = "sha256:6a5bc457320e8e199006aa5fbb03e162f5e21abe31aa6221f7a5c37ea0724c71"}, - {file = "griffe-0.38.0.tar.gz", hash = "sha256:9b97487b583042b543d1e28196caee638ecd766c8c4c98135071806cb5333ac2"}, + {file = "griffe-0.39.1-py3-none-any.whl", hash = "sha256:6ce4ecffcf0d2f96362c5974b3f7df812da8f8d4cfcc5ebc8202ef72656fc087"}, + {file = "griffe-0.39.1.tar.gz", hash = "sha256:ead8dfede6e6531cce6bf69090a4f3c6d36fdf923c43f8e85aa530552cef0c09"}, ] [package.dependencies] colorama = ">=0.4" +[[package]] +name = "h11" +version = "0.14.0" +description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +optional = false +python-versions = ">=3.7" +files = [ + {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, + {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, +] + +[[package]] +name = "httpcore" +version = 
"0.17.3" +description = "A minimal low-level HTTP client." +optional = false +python-versions = ">=3.7" +files = [ + {file = "httpcore-0.17.3-py3-none-any.whl", hash = "sha256:c2789b767ddddfa2a5782e3199b2b7f6894540b17b16ec26b2c4d8e103510b87"}, + {file = "httpcore-0.17.3.tar.gz", hash = "sha256:a6f30213335e34c1ade7be6ec7c47f19f50c56db36abef1a9dfa3815b1cb3888"}, +] + +[package.dependencies] +anyio = ">=3.0,<5.0" +certifi = "*" +h11 = ">=0.13,<0.15" +sniffio = "==1.*" + +[package.extras] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] + +[[package]] +name = "httpx" +version = "0.24.1" +description = "The next generation HTTP client." +optional = false +python-versions = ">=3.7" +files = [ + {file = "httpx-0.24.1-py3-none-any.whl", hash = "sha256:06781eb9ac53cde990577af654bd990a4949de37a28bdb4a230d434f3a30b9bd"}, + {file = "httpx-0.24.1.tar.gz", hash = "sha256:5853a43053df830c20f8110c5e69fe44d035d850b2dfe795e196f00fdb774bdd"}, +] + +[package.dependencies] +certifi = "*" +httpcore = ">=0.15.0,<0.18.0" +idna = "*" +sniffio = "*" + +[package.extras] +brotli = ["brotli", "brotlicffi"] +cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] + [[package]] name = "idna" -version = "3.4" +version = "3.6" description = "Internationalized Domain Names in Applications (IDNA)" -category = "main" optional = false python-versions = ">=3.5" files = [ - {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, - {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, + {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, + {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, ] [[package]] name = "importlib-metadata" -version = "6.8.0" +version = "4.13.0" description = "Read metadata 
from Python packages" -category = "main" optional = false -python-versions = ">=3.8" +python-versions = ">=3.7" files = [ - {file = "importlib_metadata-6.8.0-py3-none-any.whl", hash = "sha256:3ebb78df84a805d7698245025b975d9d67053cd94c79245ba4b3eb694abe68bb"}, - {file = "importlib_metadata-6.8.0.tar.gz", hash = "sha256:dbace7892d8c0c4ac1ad096662232f831d4e64f4c4545bd53016a3e9d4654743"}, + {file = "importlib_metadata-4.13.0-py3-none-any.whl", hash = "sha256:8a8a81bcf996e74fee46f0d16bd3eaa382a7eb20fd82445c3ad11f4090334116"}, + {file = "importlib_metadata-4.13.0.tar.gz", hash = "sha256:dd0173e8f150d6815e098fd354f6414b0f079af4644ddfe90c71e2fc6174346d"}, ] [package.dependencies] zipp = ">=0.5" [package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)"] perf = ["ipython"] -testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] +testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)"] [[package]] name = "importlib-resources" version = "5.13.0" description = "Read resources from Python packages" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1376,7 +1435,6 @@ testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", name = "inflection" version = "0.5.1" description = "A port of Ruby on Rails inflector to Python" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -1388,7 +1446,6 @@ files = [ name = 
"invoke" version = "2.2.0" description = "Pythonic task execution" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1398,32 +1455,27 @@ files = [ [[package]] name = "isort" -version = "5.12.0" +version = "5.13.2" description = "A Python utility / library to sort Python imports." -category = "dev" optional = false python-versions = ">=3.8.0" files = [ - {file = "isort-5.12.0-py3-none-any.whl", hash = "sha256:f84c2818376e66cf843d497486ea8fed8700b340f308f076c6fb1229dff318b6"}, - {file = "isort-5.12.0.tar.gz", hash = "sha256:8bef7dde241278824a6d83f44a544709b065191b95b6e50894bdc722fcba0504"}, + {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, + {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, ] [package.extras] -colors = ["colorama (>=0.4.3)"] -pipfile-deprecated-finder = ["pip-shims (>=0.5.2)", "pipreqs", "requirementslib"] -plugins = ["setuptools"] -requirements-deprecated-finder = ["pip-api", "pipreqs"] +colors = ["colorama (>=0.4.6)"] [[package]] name = "jinja2" -version = "3.1.2" +version = "3.1.3" description = "A very fast and expressive template engine." 
-category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, - {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, + {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, + {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, ] [package.dependencies] @@ -1436,7 +1488,6 @@ i18n = ["Babel (>=2.7)"] name = "jsonschema" version = "4.18.6" description = "An implementation of JSON Schema validation for Python" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1458,14 +1509,13 @@ format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339- [[package]] name = "jsonschema-specifications" -version = "2023.11.1" +version = "2023.12.1" description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" -category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "jsonschema_specifications-2023.11.1-py3-none-any.whl", hash = "sha256:f596778ab612b3fd29f72ea0d990393d0540a5aab18bf0407a46632eab540779"}, - {file = "jsonschema_specifications-2023.11.1.tar.gz", hash = "sha256:c9b234904ffe02f079bf91b14d79987faa685fd4b39c377a0996954c0090b9ca"}, + {file = "jsonschema_specifications-2023.12.1-py3-none-any.whl", hash = "sha256:87e4fdf3a94858b8a2ba2778d9ba57d8a9cafca7c7489c46ba0d30a8bc6a9c3c"}, + {file = "jsonschema_specifications-2023.12.1.tar.gz", hash = "sha256:48a76787b3e70f5ed53f1160d2b81f586e4ca6d1548c5de7085d1682674764cc"}, ] [package.dependencies] @@ -1474,20 +1524,19 @@ referencing = ">=0.31.0" [[package]] name = "junos-eznc" -version = "2.6.8" +version = "2.7.0" description = "Junos 'EZ' automation for non-programmers" -category = "main" optional = false -python-versions = 
">=3.5, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=3.8" files = [ - {file = "junos-eznc-2.6.8.tar.gz", hash = "sha256:80772346552225b78b6e9812bc791f67735b7e76e753dea5b7cfe888ef40e0a1"}, - {file = "junos_eznc-2.6.8-py2.py3-none-any.whl", hash = "sha256:0a62b1358d69a1eb8cdf13fee777e91b850da44a3ecf4b02c75995f5bde66f21"}, + {file = "junos-eznc-2.7.0.tar.gz", hash = "sha256:a45c90641d24ff4c86796418ea76ca64066c06d0bf644d6b77e605bf957c5c7d"}, + {file = "junos_eznc-2.7.0-py2.py3-none-any.whl", hash = "sha256:27a665957b49cf4caec2047e33b1b62f3a3ece72a244d0b98e93df9c26c984a6"}, ] [package.dependencies] jinja2 = ">=2.7.1" lxml = ">=3.2.4" -ncclient = "0.6.13" +ncclient = ">=0.6.15" paramiko = ">=1.15.2" pyparsing = "*" pyserial = "*" @@ -1499,14 +1548,13 @@ yamlordereddictloader = "*" [[package]] name = "kombu" -version = "5.3.4" +version = "5.3.5" description = "Messaging library for Python." -category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "kombu-5.3.4-py3-none-any.whl", hash = "sha256:63bb093fc9bb80cfb3a0972336a5cec1fa7ac5f9ef7e8237c6bf8dda9469313e"}, - {file = "kombu-5.3.4.tar.gz", hash = "sha256:0bb2e278644d11dea6272c17974a3dbb9688a949f3bb60aeb5b791329c44fadc"}, + {file = "kombu-5.3.5-py3-none-any.whl", hash = "sha256:0eac1bbb464afe6fb0924b21bf79460416d25d8abc52546d4f16cad94f789488"}, + {file = "kombu-5.3.5.tar.gz", hash = "sha256:30e470f1a6b49c70dc6f6d13c3e4cc4e178aa6c469ceb6bcd55645385fc84b93"}, ] [package.dependencies] @@ -1534,163 +1582,147 @@ zookeeper = ["kazoo (>=2.8.0)"] [[package]] name = "lazy-object-proxy" -version = "1.9.0" +version = "1.10.0" description = "A fast and thorough lazy object proxy." 
-category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "lazy-object-proxy-1.9.0.tar.gz", hash = "sha256:659fb5809fa4629b8a1ac5106f669cfc7bef26fbb389dda53b3e010d1ac4ebae"}, - {file = "lazy_object_proxy-1.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b40387277b0ed2d0602b8293b94d7257e17d1479e257b4de114ea11a8cb7f2d7"}, - {file = "lazy_object_proxy-1.9.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8c6cfb338b133fbdbc5cfaa10fe3c6aeea827db80c978dbd13bc9dd8526b7d4"}, - {file = "lazy_object_proxy-1.9.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:721532711daa7db0d8b779b0bb0318fa87af1c10d7fe5e52ef30f8eff254d0cd"}, - {file = "lazy_object_proxy-1.9.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:66a3de4a3ec06cd8af3f61b8e1ec67614fbb7c995d02fa224813cb7afefee701"}, - {file = "lazy_object_proxy-1.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1aa3de4088c89a1b69f8ec0dcc169aa725b0ff017899ac568fe44ddc1396df46"}, - {file = "lazy_object_proxy-1.9.0-cp310-cp310-win32.whl", hash = "sha256:f0705c376533ed2a9e5e97aacdbfe04cecd71e0aa84c7c0595d02ef93b6e4455"}, - {file = "lazy_object_proxy-1.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:ea806fd4c37bf7e7ad82537b0757999264d5f70c45468447bb2b91afdbe73a6e"}, - {file = "lazy_object_proxy-1.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:946d27deaff6cf8452ed0dba83ba38839a87f4f7a9732e8f9fd4107b21e6ff07"}, - {file = "lazy_object_proxy-1.9.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79a31b086e7e68b24b99b23d57723ef7e2c6d81ed21007b6281ebcd1688acb0a"}, - {file = "lazy_object_proxy-1.9.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f699ac1c768270c9e384e4cbd268d6e67aebcfae6cd623b4d7c3bfde5a35db59"}, - {file = 
"lazy_object_proxy-1.9.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bfb38f9ffb53b942f2b5954e0f610f1e721ccebe9cce9025a38c8ccf4a5183a4"}, - {file = "lazy_object_proxy-1.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:189bbd5d41ae7a498397287c408617fe5c48633e7755287b21d741f7db2706a9"}, - {file = "lazy_object_proxy-1.9.0-cp311-cp311-win32.whl", hash = "sha256:81fc4d08b062b535d95c9ea70dbe8a335c45c04029878e62d744bdced5141586"}, - {file = "lazy_object_proxy-1.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:f2457189d8257dd41ae9b434ba33298aec198e30adf2dcdaaa3a28b9994f6adb"}, - {file = "lazy_object_proxy-1.9.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d9e25ef10a39e8afe59a5c348a4dbf29b4868ab76269f81ce1674494e2565a6e"}, - {file = "lazy_object_proxy-1.9.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cbf9b082426036e19c6924a9ce90c740a9861e2bdc27a4834fd0a910742ac1e8"}, - {file = "lazy_object_proxy-1.9.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f5fa4a61ce2438267163891961cfd5e32ec97a2c444e5b842d574251ade27d2"}, - {file = "lazy_object_proxy-1.9.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:8fa02eaab317b1e9e03f69aab1f91e120e7899b392c4fc19807a8278a07a97e8"}, - {file = "lazy_object_proxy-1.9.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e7c21c95cae3c05c14aafffe2865bbd5e377cfc1348c4f7751d9dc9a48ca4bda"}, - {file = "lazy_object_proxy-1.9.0-cp37-cp37m-win32.whl", hash = "sha256:f12ad7126ae0c98d601a7ee504c1122bcef553d1d5e0c3bfa77b16b3968d2734"}, - {file = "lazy_object_proxy-1.9.0-cp37-cp37m-win_amd64.whl", hash = "sha256:edd20c5a55acb67c7ed471fa2b5fb66cb17f61430b7a6b9c3b4a1e40293b1671"}, - {file = "lazy_object_proxy-1.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2d0daa332786cf3bb49e10dc6a17a52f6a8f9601b4cf5c295a4f85854d61de63"}, - {file = "lazy_object_proxy-1.9.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:9cd077f3d04a58e83d04b20e334f678c2b0ff9879b9375ed107d5d07ff160171"}, - {file = "lazy_object_proxy-1.9.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:660c94ea760b3ce47d1855a30984c78327500493d396eac4dfd8bd82041b22be"}, - {file = "lazy_object_proxy-1.9.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:212774e4dfa851e74d393a2370871e174d7ff0ebc980907723bb67d25c8a7c30"}, - {file = "lazy_object_proxy-1.9.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f0117049dd1d5635bbff65444496c90e0baa48ea405125c088e93d9cf4525b11"}, - {file = "lazy_object_proxy-1.9.0-cp38-cp38-win32.whl", hash = "sha256:0a891e4e41b54fd5b8313b96399f8b0e173bbbfc03c7631f01efbe29bb0bcf82"}, - {file = "lazy_object_proxy-1.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:9990d8e71b9f6488e91ad25f322898c136b008d87bf852ff65391b004da5e17b"}, - {file = "lazy_object_proxy-1.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9e7551208b2aded9c1447453ee366f1c4070602b3d932ace044715d89666899b"}, - {file = "lazy_object_proxy-1.9.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f83ac4d83ef0ab017683d715ed356e30dd48a93746309c8f3517e1287523ef4"}, - {file = "lazy_object_proxy-1.9.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7322c3d6f1766d4ef1e51a465f47955f1e8123caee67dd641e67d539a534d006"}, - {file = "lazy_object_proxy-1.9.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:18b78ec83edbbeb69efdc0e9c1cb41a3b1b1ed11ddd8ded602464c3fc6020494"}, - {file = "lazy_object_proxy-1.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:09763491ce220c0299688940f8dc2c5d05fd1f45af1e42e636b2e8b2303e4382"}, - {file = "lazy_object_proxy-1.9.0-cp39-cp39-win32.whl", hash = "sha256:9090d8e53235aa280fc9239a86ae3ea8ac58eff66a705fa6aa2ec4968b95c821"}, - {file = "lazy_object_proxy-1.9.0-cp39-cp39-win_amd64.whl", hash = 
"sha256:db1c1722726f47e10e0b5fdbf15ac3b8adb58c091d12b3ab713965795036985f"}, + {file = "lazy-object-proxy-1.10.0.tar.gz", hash = "sha256:78247b6d45f43a52ef35c25b5581459e85117225408a4128a3daf8bf9648ac69"}, + {file = "lazy_object_proxy-1.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:855e068b0358ab916454464a884779c7ffa312b8925c6f7401e952dcf3b89977"}, + {file = "lazy_object_proxy-1.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab7004cf2e59f7c2e4345604a3e6ea0d92ac44e1c2375527d56492014e690c3"}, + {file = "lazy_object_proxy-1.10.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc0d2fc424e54c70c4bc06787e4072c4f3b1aa2f897dfdc34ce1013cf3ceef05"}, + {file = "lazy_object_proxy-1.10.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e2adb09778797da09d2b5ebdbceebf7dd32e2c96f79da9052b2e87b6ea495895"}, + {file = "lazy_object_proxy-1.10.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b1f711e2c6dcd4edd372cf5dec5c5a30d23bba06ee012093267b3376c079ec83"}, + {file = "lazy_object_proxy-1.10.0-cp310-cp310-win32.whl", hash = "sha256:76a095cfe6045c7d0ca77db9934e8f7b71b14645f0094ffcd842349ada5c5fb9"}, + {file = "lazy_object_proxy-1.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:b4f87d4ed9064b2628da63830986c3d2dca7501e6018347798313fcf028e2fd4"}, + {file = "lazy_object_proxy-1.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fec03caabbc6b59ea4a638bee5fce7117be8e99a4103d9d5ad77f15d6f81020c"}, + {file = "lazy_object_proxy-1.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:02c83f957782cbbe8136bee26416686a6ae998c7b6191711a04da776dc9e47d4"}, + {file = "lazy_object_proxy-1.10.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:009e6bb1f1935a62889ddc8541514b6a9e1fcf302667dcb049a0be5c8f613e56"}, + {file = 
"lazy_object_proxy-1.10.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:75fc59fc450050b1b3c203c35020bc41bd2695ed692a392924c6ce180c6f1dc9"}, + {file = "lazy_object_proxy-1.10.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:782e2c9b2aab1708ffb07d4bf377d12901d7a1d99e5e410d648d892f8967ab1f"}, + {file = "lazy_object_proxy-1.10.0-cp311-cp311-win32.whl", hash = "sha256:edb45bb8278574710e68a6b021599a10ce730d156e5b254941754a9cc0b17d03"}, + {file = "lazy_object_proxy-1.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:e271058822765ad5e3bca7f05f2ace0de58a3f4e62045a8c90a0dfd2f8ad8cc6"}, + {file = "lazy_object_proxy-1.10.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e98c8af98d5707dcdecc9ab0863c0ea6e88545d42ca7c3feffb6b4d1e370c7ba"}, + {file = "lazy_object_proxy-1.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:952c81d415b9b80ea261d2372d2a4a2332a3890c2b83e0535f263ddfe43f0d43"}, + {file = "lazy_object_proxy-1.10.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80b39d3a151309efc8cc48675918891b865bdf742a8616a337cb0090791a0de9"}, + {file = "lazy_object_proxy-1.10.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e221060b701e2aa2ea991542900dd13907a5c90fa80e199dbf5a03359019e7a3"}, + {file = "lazy_object_proxy-1.10.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:92f09ff65ecff3108e56526f9e2481b8116c0b9e1425325e13245abfd79bdb1b"}, + {file = "lazy_object_proxy-1.10.0-cp312-cp312-win32.whl", hash = "sha256:3ad54b9ddbe20ae9f7c1b29e52f123120772b06dbb18ec6be9101369d63a4074"}, + {file = "lazy_object_proxy-1.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:127a789c75151db6af398b8972178afe6bda7d6f68730c057fbbc2e96b08d282"}, + {file = "lazy_object_proxy-1.10.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9e4ed0518a14dd26092614412936920ad081a424bdcb54cc13349a8e2c6d106a"}, + {file = 
"lazy_object_proxy-1.10.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ad9e6ed739285919aa9661a5bbed0aaf410aa60231373c5579c6b4801bd883c"}, + {file = "lazy_object_proxy-1.10.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fc0a92c02fa1ca1e84fc60fa258458e5bf89d90a1ddaeb8ed9cc3147f417255"}, + {file = "lazy_object_proxy-1.10.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0aefc7591920bbd360d57ea03c995cebc204b424524a5bd78406f6e1b8b2a5d8"}, + {file = "lazy_object_proxy-1.10.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5faf03a7d8942bb4476e3b62fd0f4cf94eaf4618e304a19865abf89a35c0bbee"}, + {file = "lazy_object_proxy-1.10.0-cp38-cp38-win32.whl", hash = "sha256:e333e2324307a7b5d86adfa835bb500ee70bfcd1447384a822e96495796b0ca4"}, + {file = "lazy_object_proxy-1.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:cb73507defd385b7705c599a94474b1d5222a508e502553ef94114a143ec6696"}, + {file = "lazy_object_proxy-1.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:366c32fe5355ef5fc8a232c5436f4cc66e9d3e8967c01fb2e6302fd6627e3d94"}, + {file = "lazy_object_proxy-1.10.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2297f08f08a2bb0d32a4265e98a006643cd7233fb7983032bd61ac7a02956b3b"}, + {file = "lazy_object_proxy-1.10.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18dd842b49456aaa9a7cf535b04ca4571a302ff72ed8740d06b5adcd41fe0757"}, + {file = "lazy_object_proxy-1.10.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:217138197c170a2a74ca0e05bddcd5f1796c735c37d0eee33e43259b192aa424"}, + {file = "lazy_object_proxy-1.10.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9a3a87cf1e133e5b1994144c12ca4aa3d9698517fe1e2ca82977781b16955658"}, + {file = "lazy_object_proxy-1.10.0-cp39-cp39-win32.whl", hash = "sha256:30b339b2a743c5288405aa79a69e706a06e02958eab31859f7f3c04980853b70"}, + {file 
= "lazy_object_proxy-1.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:a899b10e17743683b293a729d3a11f2f399e8a90c73b089e29f5d0fe3509f0dd"}, + {file = "lazy_object_proxy-1.10.0-pp310.pp311.pp312.pp38.pp39-none-any.whl", hash = "sha256:80fa48bd89c8f2f456fc0765c11c23bf5af827febacd2f523ca5bc1893fcc09d"}, ] [[package]] name = "lxml" -version = "4.9.3" +version = "5.1.0" description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." -category = "main" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, != 3.4.*" -files = [ - {file = "lxml-4.9.3-cp27-cp27m-macosx_11_0_x86_64.whl", hash = "sha256:b0a545b46b526d418eb91754565ba5b63b1c0b12f9bd2f808c852d9b4b2f9b5c"}, - {file = "lxml-4.9.3-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:075b731ddd9e7f68ad24c635374211376aa05a281673ede86cbe1d1b3455279d"}, - {file = "lxml-4.9.3-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:1e224d5755dba2f4a9498e150c43792392ac9b5380aa1b845f98a1618c94eeef"}, - {file = "lxml-4.9.3-cp27-cp27m-win32.whl", hash = "sha256:2c74524e179f2ad6d2a4f7caf70e2d96639c0954c943ad601a9e146c76408ed7"}, - {file = "lxml-4.9.3-cp27-cp27m-win_amd64.whl", hash = "sha256:4f1026bc732b6a7f96369f7bfe1a4f2290fb34dce00d8644bc3036fb351a4ca1"}, - {file = "lxml-4.9.3-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c0781a98ff5e6586926293e59480b64ddd46282953203c76ae15dbbbf302e8bb"}, - {file = "lxml-4.9.3-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:cef2502e7e8a96fe5ad686d60b49e1ab03e438bd9123987994528febd569868e"}, - {file = "lxml-4.9.3-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:b86164d2cff4d3aaa1f04a14685cbc072efd0b4f99ca5708b2ad1b9b5988a991"}, - {file = "lxml-4.9.3-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:42871176e7896d5d45138f6d28751053c711ed4d48d8e30b498da155af39aebd"}, - {file = 
"lxml-4.9.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:ae8b9c6deb1e634ba4f1930eb67ef6e6bf6a44b6eb5ad605642b2d6d5ed9ce3c"}, - {file = "lxml-4.9.3-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:411007c0d88188d9f621b11d252cce90c4a2d1a49db6c068e3c16422f306eab8"}, - {file = "lxml-4.9.3-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:cd47b4a0d41d2afa3e58e5bf1f62069255aa2fd6ff5ee41604418ca925911d76"}, - {file = "lxml-4.9.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0e2cb47860da1f7e9a5256254b74ae331687b9672dfa780eed355c4c9c3dbd23"}, - {file = "lxml-4.9.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1247694b26342a7bf47c02e513d32225ededd18045264d40758abeb3c838a51f"}, - {file = "lxml-4.9.3-cp310-cp310-win32.whl", hash = "sha256:cdb650fc86227eba20de1a29d4b2c1bfe139dc75a0669270033cb2ea3d391b85"}, - {file = "lxml-4.9.3-cp310-cp310-win_amd64.whl", hash = "sha256:97047f0d25cd4bcae81f9ec9dc290ca3e15927c192df17331b53bebe0e3ff96d"}, - {file = "lxml-4.9.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:1f447ea5429b54f9582d4b955f5f1985f278ce5cf169f72eea8afd9502973dd5"}, - {file = "lxml-4.9.3-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:57d6ba0ca2b0c462f339640d22882acc711de224d769edf29962b09f77129cbf"}, - {file = "lxml-4.9.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:9767e79108424fb6c3edf8f81e6730666a50feb01a328f4a016464a5893f835a"}, - {file = "lxml-4.9.3-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:71c52db65e4b56b8ddc5bb89fb2e66c558ed9d1a74a45ceb7dcb20c191c3df2f"}, - {file = "lxml-4.9.3-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:d73d8ecf8ecf10a3bd007f2192725a34bd62898e8da27eb9d32a58084f93962b"}, - {file = "lxml-4.9.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0a3d3487f07c1d7f150894c238299934a2a074ef590b583103a45002035be120"}, - {file = 
"lxml-4.9.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9e28c51fa0ce5674be9f560c6761c1b441631901993f76700b1b30ca6c8378d6"}, - {file = "lxml-4.9.3-cp311-cp311-win32.whl", hash = "sha256:0bfd0767c5c1de2551a120673b72e5d4b628737cb05414f03c3277bf9bed3305"}, - {file = "lxml-4.9.3-cp311-cp311-win_amd64.whl", hash = "sha256:25f32acefac14ef7bd53e4218fe93b804ef6f6b92ffdb4322bb6d49d94cad2bc"}, - {file = "lxml-4.9.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:d3ff32724f98fbbbfa9f49d82852b159e9784d6094983d9a8b7f2ddaebb063d4"}, - {file = "lxml-4.9.3-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:48d6ed886b343d11493129e019da91d4039826794a3e3027321c56d9e71505be"}, - {file = "lxml-4.9.3-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:9a92d3faef50658dd2c5470af249985782bf754c4e18e15afb67d3ab06233f13"}, - {file = "lxml-4.9.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b4e4bc18382088514ebde9328da057775055940a1f2e18f6ad2d78aa0f3ec5b9"}, - {file = "lxml-4.9.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fc9b106a1bf918db68619fdcd6d5ad4f972fdd19c01d19bdb6bf63f3589a9ec5"}, - {file = "lxml-4.9.3-cp312-cp312-win_amd64.whl", hash = "sha256:d37017287a7adb6ab77e1c5bee9bcf9660f90ff445042b790402a654d2ad81d8"}, - {file = "lxml-4.9.3-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:56dc1f1ebccc656d1b3ed288f11e27172a01503fc016bcabdcbc0978b19352b7"}, - {file = "lxml-4.9.3-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:578695735c5a3f51569810dfebd05dd6f888147a34f0f98d4bb27e92b76e05c2"}, - {file = "lxml-4.9.3-cp35-cp35m-win32.whl", hash = "sha256:704f61ba8c1283c71b16135caf697557f5ecf3e74d9e453233e4771d68a1f42d"}, - {file = "lxml-4.9.3-cp35-cp35m-win_amd64.whl", hash = "sha256:c41bfca0bd3532d53d16fd34d20806d5c2b1ace22a2f2e4c0008570bf2c58833"}, - {file = "lxml-4.9.3-cp36-cp36m-macosx_11_0_x86_64.whl", hash = "sha256:64f479d719dc9f4c813ad9bb6b28f8390360660b73b2e4beb4cb0ae7104f1c12"}, - {file = 
"lxml-4.9.3-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:dd708cf4ee4408cf46a48b108fb9427bfa00b9b85812a9262b5c668af2533ea5"}, - {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c31c7462abdf8f2ac0577d9f05279727e698f97ecbb02f17939ea99ae8daa98"}, - {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:e3cd95e10c2610c360154afdc2f1480aea394f4a4f1ea0a5eacce49640c9b190"}, - {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:4930be26af26ac545c3dffb662521d4e6268352866956672231887d18f0eaab2"}, - {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4aec80cde9197340bc353d2768e2a75f5f60bacda2bab72ab1dc499589b3878c"}, - {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:14e019fd83b831b2e61baed40cab76222139926b1fb5ed0e79225bc0cae14584"}, - {file = "lxml-4.9.3-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0c0850c8b02c298d3c7006b23e98249515ac57430e16a166873fc47a5d549287"}, - {file = "lxml-4.9.3-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:aca086dc5f9ef98c512bac8efea4483eb84abbf926eaeedf7b91479feb092458"}, - {file = "lxml-4.9.3-cp36-cp36m-win32.whl", hash = "sha256:50baa9c1c47efcaef189f31e3d00d697c6d4afda5c3cde0302d063492ff9b477"}, - {file = "lxml-4.9.3-cp36-cp36m-win_amd64.whl", hash = "sha256:bef4e656f7d98aaa3486d2627e7d2df1157d7e88e7efd43a65aa5dd4714916cf"}, - {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:46f409a2d60f634fe550f7133ed30ad5321ae2e6630f13657fb9479506b00601"}, - {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:4c28a9144688aef80d6ea666c809b4b0e50010a2aca784c97f5e6bf143d9f129"}, - {file = 
"lxml-4.9.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:141f1d1a9b663c679dc524af3ea1773e618907e96075262726c7612c02b149a4"}, - {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:53ace1c1fd5a74ef662f844a0413446c0629d151055340e9893da958a374f70d"}, - {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:17a753023436a18e27dd7769e798ce302963c236bc4114ceee5b25c18c52c693"}, - {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7d298a1bd60c067ea75d9f684f5f3992c9d6766fadbc0bcedd39750bf344c2f4"}, - {file = "lxml-4.9.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:081d32421db5df44c41b7f08a334a090a545c54ba977e47fd7cc2deece78809a"}, - {file = "lxml-4.9.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:23eed6d7b1a3336ad92d8e39d4bfe09073c31bfe502f20ca5116b2a334f8ec02"}, - {file = "lxml-4.9.3-cp37-cp37m-win32.whl", hash = "sha256:1509dd12b773c02acd154582088820893109f6ca27ef7291b003d0e81666109f"}, - {file = "lxml-4.9.3-cp37-cp37m-win_amd64.whl", hash = "sha256:120fa9349a24c7043854c53cae8cec227e1f79195a7493e09e0c12e29f918e52"}, - {file = "lxml-4.9.3-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:4d2d1edbca80b510443f51afd8496be95529db04a509bc8faee49c7b0fb6d2cc"}, - {file = "lxml-4.9.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:8d7e43bd40f65f7d97ad8ef5c9b1778943d02f04febef12def25f7583d19baac"}, - {file = "lxml-4.9.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:71d66ee82e7417828af6ecd7db817913cb0cf9d4e61aa0ac1fde0583d84358db"}, - {file = "lxml-4.9.3-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:6fc3c450eaa0b56f815c7b62f2b7fba7266c4779adcf1cece9e6deb1de7305ce"}, - {file = "lxml-4.9.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:65299ea57d82fb91c7f019300d24050c4ddeb7c5a190e076b5f48a2b43d19c42"}, - {file = "lxml-4.9.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:eadfbbbfb41b44034a4c757fd5d70baccd43296fb894dba0295606a7cf3124aa"}, - {file = "lxml-4.9.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3e9bdd30efde2b9ccfa9cb5768ba04fe71b018a25ea093379c857c9dad262c40"}, - {file = "lxml-4.9.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fcdd00edfd0a3001e0181eab3e63bd5c74ad3e67152c84f93f13769a40e073a7"}, - {file = "lxml-4.9.3-cp38-cp38-win32.whl", hash = "sha256:57aba1bbdf450b726d58b2aea5fe47c7875f5afb2c4a23784ed78f19a0462574"}, - {file = "lxml-4.9.3-cp38-cp38-win_amd64.whl", hash = "sha256:92af161ecbdb2883c4593d5ed4815ea71b31fafd7fd05789b23100d081ecac96"}, - {file = "lxml-4.9.3-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:9bb6ad405121241e99a86efff22d3ef469024ce22875a7ae045896ad23ba2340"}, - {file = "lxml-4.9.3-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:8ed74706b26ad100433da4b9d807eae371efaa266ffc3e9191ea436087a9d6a7"}, - {file = "lxml-4.9.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:fbf521479bcac1e25a663df882c46a641a9bff6b56dc8b0fafaebd2f66fb231b"}, - {file = "lxml-4.9.3-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:303bf1edce6ced16bf67a18a1cf8339d0db79577eec5d9a6d4a80f0fb10aa2da"}, - {file = "lxml-4.9.3-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:5515edd2a6d1a5a70bfcdee23b42ec33425e405c5b351478ab7dc9347228f96e"}, - {file = "lxml-4.9.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:690dafd0b187ed38583a648076865d8c229661ed20e48f2335d68e2cf7dc829d"}, - {file = "lxml-4.9.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:b6420a005548ad52154c8ceab4a1290ff78d757f9e5cbc68f8c77089acd3c432"}, - {file = "lxml-4.9.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:bb3bb49c7a6ad9d981d734ef7c7193bc349ac338776a0360cc671eaee89bcf69"}, - {file = "lxml-4.9.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d27be7405547d1f958b60837dc4c1007da90b8b23f54ba1f8b728c78fdb19d50"}, - {file = "lxml-4.9.3-cp39-cp39-win32.whl", hash = "sha256:8df133a2ea5e74eef5e8fc6f19b9e085f758768a16e9877a60aec455ed2609b2"}, - {file = "lxml-4.9.3-cp39-cp39-win_amd64.whl", hash = "sha256:4dd9a263e845a72eacb60d12401e37c616438ea2e5442885f65082c276dfb2b2"}, - {file = "lxml-4.9.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:6689a3d7fd13dc687e9102a27e98ef33730ac4fe37795d5036d18b4d527abd35"}, - {file = "lxml-4.9.3-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:f6bdac493b949141b733c5345b6ba8f87a226029cbabc7e9e121a413e49441e0"}, - {file = "lxml-4.9.3-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:05186a0f1346ae12553d66df1cfce6f251589fea3ad3da4f3ef4e34b2d58c6a3"}, - {file = "lxml-4.9.3-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c2006f5c8d28dee289f7020f721354362fa304acbaaf9745751ac4006650254b"}, - {file = "lxml-4.9.3-pp38-pypy38_pp73-macosx_11_0_x86_64.whl", hash = "sha256:5c245b783db29c4e4fbbbfc9c5a78be496c9fea25517f90606aa1f6b2b3d5f7b"}, - {file = "lxml-4.9.3-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:4fb960a632a49f2f089d522f70496640fdf1218f1243889da3822e0a9f5f3ba7"}, - {file = "lxml-4.9.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:50670615eaf97227d5dc60de2dc99fb134a7130d310d783314e7724bf163f75d"}, - {file = "lxml-4.9.3-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:9719fe17307a9e814580af1f5c6e05ca593b12fb7e44fe62450a5384dbf61b4b"}, - {file = "lxml-4.9.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:3331bece23c9ee066e0fb3f96c61322b9e0f54d775fccefff4c38ca488de283a"}, - {file = 
"lxml-4.9.3-pp39-pypy39_pp73-macosx_11_0_x86_64.whl", hash = "sha256:ed667f49b11360951e201453fc3967344d0d0263aa415e1619e85ae7fd17b4e0"}, - {file = "lxml-4.9.3-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:8b77946fd508cbf0fccd8e400a7f71d4ac0e1595812e66025bac475a8e811694"}, - {file = "lxml-4.9.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:e4da8ca0c0c0aea88fd46be8e44bd49716772358d648cce45fe387f7b92374a7"}, - {file = "lxml-4.9.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:fe4bda6bd4340caa6e5cf95e73f8fea5c4bfc55763dd42f1b50a94c1b4a2fbd4"}, - {file = "lxml-4.9.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:f3df3db1d336b9356dd3112eae5f5c2b8b377f3bc826848567f10bfddfee77e9"}, - {file = "lxml-4.9.3.tar.gz", hash = "sha256:48628bd53a426c9eb9bc066a923acaa0878d1e86129fd5359aee99285f4eed9c"}, +optional = false +python-versions = ">=3.6" +files = [ + {file = "lxml-5.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:704f5572ff473a5f897745abebc6df40f22d4133c1e0a1f124e4f2bd3330ff7e"}, + {file = "lxml-5.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9d3c0f8567ffe7502d969c2c1b809892dc793b5d0665f602aad19895f8d508da"}, + {file = "lxml-5.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5fcfbebdb0c5d8d18b84118842f31965d59ee3e66996ac842e21f957eb76138c"}, + {file = "lxml-5.1.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2f37c6d7106a9d6f0708d4e164b707037b7380fcd0b04c5bd9cae1fb46a856fb"}, + {file = "lxml-5.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2befa20a13f1a75c751f47e00929fb3433d67eb9923c2c0b364de449121f447c"}, + {file = "lxml-5.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22b7ee4c35f374e2c20337a95502057964d7e35b996b1c667b5c65c567d2252a"}, + {file = 
"lxml-5.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:bf8443781533b8d37b295016a4b53c1494fa9a03573c09ca5104550c138d5c05"}, + {file = "lxml-5.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:82bddf0e72cb2af3cbba7cec1d2fd11fda0de6be8f4492223d4a268713ef2147"}, + {file = "lxml-5.1.0-cp310-cp310-win32.whl", hash = "sha256:b66aa6357b265670bb574f050ffceefb98549c721cf28351b748be1ef9577d93"}, + {file = "lxml-5.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:4946e7f59b7b6a9e27bef34422f645e9a368cb2be11bf1ef3cafc39a1f6ba68d"}, + {file = "lxml-5.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:14deca1460b4b0f6b01f1ddc9557704e8b365f55c63070463f6c18619ebf964f"}, + {file = "lxml-5.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ed8c3d2cd329bf779b7ed38db176738f3f8be637bb395ce9629fc76f78afe3d4"}, + {file = "lxml-5.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:436a943c2900bb98123b06437cdd30580a61340fbdb7b28aaf345a459c19046a"}, + {file = "lxml-5.1.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:acb6b2f96f60f70e7f34efe0c3ea34ca63f19ca63ce90019c6cbca6b676e81fa"}, + {file = "lxml-5.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:af8920ce4a55ff41167ddbc20077f5698c2e710ad3353d32a07d3264f3a2021e"}, + {file = "lxml-5.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7cfced4a069003d8913408e10ca8ed092c49a7f6cefee9bb74b6b3e860683b45"}, + {file = "lxml-5.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9e5ac3437746189a9b4121db2a7b86056ac8786b12e88838696899328fc44bb2"}, + {file = "lxml-5.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f4c9bda132ad108b387c33fabfea47866af87f4ea6ffb79418004f0521e63204"}, + {file = "lxml-5.1.0-cp311-cp311-win32.whl", hash = "sha256:bc64d1b1dab08f679fb89c368f4c05693f58a9faf744c4d390d7ed1d8223869b"}, + {file = "lxml-5.1.0-cp311-cp311-win_amd64.whl", hash = 
"sha256:a5ab722ae5a873d8dcee1f5f45ddd93c34210aed44ff2dc643b5025981908cda"}, + {file = "lxml-5.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:9aa543980ab1fbf1720969af1d99095a548ea42e00361e727c58a40832439114"}, + {file = "lxml-5.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6f11b77ec0979f7e4dc5ae081325a2946f1fe424148d3945f943ceaede98adb8"}, + {file = "lxml-5.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a36c506e5f8aeb40680491d39ed94670487ce6614b9d27cabe45d94cd5d63e1e"}, + {file = "lxml-5.1.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f643ffd2669ffd4b5a3e9b41c909b72b2a1d5e4915da90a77e119b8d48ce867a"}, + {file = "lxml-5.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:16dd953fb719f0ffc5bc067428fc9e88f599e15723a85618c45847c96f11f431"}, + {file = "lxml-5.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16018f7099245157564d7148165132c70adb272fb5a17c048ba70d9cc542a1a1"}, + {file = "lxml-5.1.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:82cd34f1081ae4ea2ede3d52f71b7be313756e99b4b5f829f89b12da552d3aa3"}, + {file = "lxml-5.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:19a1bc898ae9f06bccb7c3e1dfd73897ecbbd2c96afe9095a6026016e5ca97b8"}, + {file = "lxml-5.1.0-cp312-cp312-win32.whl", hash = "sha256:13521a321a25c641b9ea127ef478b580b5ec82aa2e9fc076c86169d161798b01"}, + {file = "lxml-5.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:1ad17c20e3666c035db502c78b86e58ff6b5991906e55bdbef94977700c72623"}, + {file = "lxml-5.1.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:24ef5a4631c0b6cceaf2dbca21687e29725b7c4e171f33a8f8ce23c12558ded1"}, + {file = "lxml-5.1.0-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8d2900b7f5318bc7ad8631d3d40190b95ef2aa8cc59473b73b294e4a55e9f30f"}, + {file = 
"lxml-5.1.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:601f4a75797d7a770daed8b42b97cd1bb1ba18bd51a9382077a6a247a12aa38d"}, + {file = "lxml-5.1.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4b68c961b5cc402cbd99cca5eb2547e46ce77260eb705f4d117fd9c3f932b95"}, + {file = "lxml-5.1.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:afd825e30f8d1f521713a5669b63657bcfe5980a916c95855060048b88e1adb7"}, + {file = "lxml-5.1.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:262bc5f512a66b527d026518507e78c2f9c2bd9eb5c8aeeb9f0eb43fcb69dc67"}, + {file = "lxml-5.1.0-cp36-cp36m-win32.whl", hash = "sha256:e856c1c7255c739434489ec9c8aa9cdf5179785d10ff20add308b5d673bed5cd"}, + {file = "lxml-5.1.0-cp36-cp36m-win_amd64.whl", hash = "sha256:c7257171bb8d4432fe9d6fdde4d55fdbe663a63636a17f7f9aaba9bcb3153ad7"}, + {file = "lxml-5.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b9e240ae0ba96477682aa87899d94ddec1cc7926f9df29b1dd57b39e797d5ab5"}, + {file = "lxml-5.1.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a96f02ba1bcd330807fc060ed91d1f7a20853da6dd449e5da4b09bfcc08fdcf5"}, + {file = "lxml-5.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e3898ae2b58eeafedfe99e542a17859017d72d7f6a63de0f04f99c2cb125936"}, + {file = "lxml-5.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61c5a7edbd7c695e54fca029ceb351fc45cd8860119a0f83e48be44e1c464862"}, + {file = "lxml-5.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:3aeca824b38ca78d9ee2ab82bd9883083d0492d9d17df065ba3b94e88e4d7ee6"}, + {file = "lxml-5.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8f52fe6859b9db71ee609b0c0a70fea5f1e71c3462ecf144ca800d3f434f0764"}, + {file = "lxml-5.1.0-cp37-cp37m-win32.whl", hash = "sha256:d42e3a3fc18acc88b838efded0e6ec3edf3e328a58c68fbd36a7263a874906c8"}, + {file = 
"lxml-5.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:eac68f96539b32fce2c9b47eb7c25bb2582bdaf1bbb360d25f564ee9e04c542b"}, + {file = "lxml-5.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ae15347a88cf8af0949a9872b57a320d2605ae069bcdf047677318bc0bba45b1"}, + {file = "lxml-5.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c26aab6ea9c54d3bed716b8851c8bfc40cb249b8e9880e250d1eddde9f709bf5"}, + {file = "lxml-5.1.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:342e95bddec3a698ac24378d61996b3ee5ba9acfeb253986002ac53c9a5f6f84"}, + {file = "lxml-5.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:725e171e0b99a66ec8605ac77fa12239dbe061482ac854d25720e2294652eeaa"}, + {file = "lxml-5.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d184e0d5c918cff04cdde9dbdf9600e960161d773666958c9d7b565ccc60c45"}, + {file = "lxml-5.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:98f3f020a2b736566c707c8e034945c02aa94e124c24f77ca097c446f81b01f1"}, + {file = "lxml-5.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6d48fc57e7c1e3df57be5ae8614bab6d4e7b60f65c5457915c26892c41afc59e"}, + {file = "lxml-5.1.0-cp38-cp38-win32.whl", hash = "sha256:7ec465e6549ed97e9f1e5ed51c657c9ede767bc1c11552f7f4d022c4df4a977a"}, + {file = "lxml-5.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:b21b4031b53d25b0858d4e124f2f9131ffc1530431c6d1321805c90da78388d1"}, + {file = "lxml-5.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:52427a7eadc98f9e62cb1368a5079ae826f94f05755d2d567d93ee1bc3ceb354"}, + {file = "lxml-5.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6a2a2c724d97c1eb8cf966b16ca2915566a4904b9aad2ed9a09c748ffe14f969"}, + {file = "lxml-5.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:843b9c835580d52828d8f69ea4302537337a21e6b4f1ec711a52241ba4a824f3"}, + {file = 
"lxml-5.1.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9b99f564659cfa704a2dd82d0684207b1aadf7d02d33e54845f9fc78e06b7581"}, + {file = "lxml-5.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f8b0c78e7aac24979ef09b7f50da871c2de2def043d468c4b41f512d831e912"}, + {file = "lxml-5.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9bcf86dfc8ff3e992fed847c077bd875d9e0ba2fa25d859c3a0f0f76f07f0c8d"}, + {file = "lxml-5.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:49a9b4af45e8b925e1cd6f3b15bbba2c81e7dba6dce170c677c9cda547411e14"}, + {file = "lxml-5.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:280f3edf15c2a967d923bcfb1f8f15337ad36f93525828b40a0f9d6c2ad24890"}, + {file = "lxml-5.1.0-cp39-cp39-win32.whl", hash = "sha256:ed7326563024b6e91fef6b6c7a1a2ff0a71b97793ac33dbbcf38f6005e51ff6e"}, + {file = "lxml-5.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:8d7b4beebb178e9183138f552238f7e6613162a42164233e2bda00cb3afac58f"}, + {file = "lxml-5.1.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9bd0ae7cc2b85320abd5e0abad5ccee5564ed5f0cc90245d2f9a8ef330a8deae"}, + {file = "lxml-5.1.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8c1d679df4361408b628f42b26a5d62bd3e9ba7f0c0e7969f925021554755aa"}, + {file = "lxml-5.1.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:2ad3a8ce9e8a767131061a22cd28fdffa3cd2dc193f399ff7b81777f3520e372"}, + {file = "lxml-5.1.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:304128394c9c22b6569eba2a6d98392b56fbdfbad58f83ea702530be80d0f9df"}, + {file = "lxml-5.1.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d74fcaf87132ffc0447b3c685a9f862ffb5b43e70ea6beec2fb8057d5d2a1fea"}, + {file = "lxml-5.1.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:8cf5877f7ed384dabfdcc37922c3191bf27e55b498fecece9fd5c2c7aaa34c33"}, + {file = 
"lxml-5.1.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:877efb968c3d7eb2dad540b6cabf2f1d3c0fbf4b2d309a3c141f79c7e0061324"}, + {file = "lxml-5.1.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f14a4fb1c1c402a22e6a341a24c1341b4a3def81b41cd354386dcb795f83897"}, + {file = "lxml-5.1.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:25663d6e99659544ee8fe1b89b1a8c0aaa5e34b103fab124b17fa958c4a324a6"}, + {file = "lxml-5.1.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:8b9f19df998761babaa7f09e6bc169294eefafd6149aaa272081cbddc7ba4ca3"}, + {file = "lxml-5.1.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e53d7e6a98b64fe54775d23a7c669763451340c3d44ad5e3a3b48a1efbdc96f"}, + {file = "lxml-5.1.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:c3cd1fc1dc7c376c54440aeaaa0dcc803d2126732ff5c6b68ccd619f2e64be4f"}, + {file = "lxml-5.1.0.tar.gz", hash = "sha256:3eea6ed6e6c918e468e693c41ef07f3c3acc310b70ddd9cc72d9ef84bc9564ca"}, ] [package.extras] cssselect = ["cssselect (>=0.7)"] html5 = ["html5lib"] htmlsoup = ["BeautifulSoup4"] -source = ["Cython (>=0.29.35)"] +source = ["Cython (>=3.0.7)"] [[package]] name = "markdown" version = "3.3.7" description = "Python implementation of Markdown." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1708,7 +1740,6 @@ testing = ["coverage", "pyyaml"] name = "markdown-it-py" version = "3.0.0" description = "Python port of markdown-it. Markdown parsing, done right!" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1733,7 +1764,6 @@ testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] name = "markupsafe" version = "2.1.3" description = "Safely add untrusted strings to HTML/XML markup." 
-category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1757,6 +1787,16 @@ files = [ {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"}, + {file = 
"MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, @@ -1793,7 +1833,6 @@ files = [ name = "mccabe" version = "0.7.0" description = "McCabe checker, plugin for flake8" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1805,7 +1844,6 @@ files = [ name = "mdurl" version = "0.1.2" description = "Markdown URL utilities" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1817,7 +1855,6 @@ files = [ name = "mergedeep" version = "1.3.4" description = "A deep merge function for 🐍." -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1829,7 +1866,6 @@ files = [ name = "mkdocs" version = "1.5.2" description = "Project documentation with Markdown." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1861,7 +1897,6 @@ min-versions = ["babel (==2.9.0)", "click (==7.0)", "colorama (==0.4)", "ghp-imp name = "mkdocs-autorefs" version = "0.5.0" description = "Automatically link across pages in MkDocs." 
-category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1877,7 +1912,6 @@ mkdocs = ">=1.1" name = "mkdocs-material" version = "9.1.15" description = "Documentation that simply works" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1898,21 +1932,19 @@ requests = ">=2.26" [[package]] name = "mkdocs-material-extensions" -version = "1.3" +version = "1.3.1" description = "Extension pack for Python Markdown and MkDocs Material." -category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "mkdocs_material_extensions-1.3-py3-none-any.whl", hash = "sha256:0297cc48ba68a9fdd1ef3780a3b41b534b0d0df1d1181a44676fda5f464eeadc"}, - {file = "mkdocs_material_extensions-1.3.tar.gz", hash = "sha256:f0446091503acb110a7cab9349cbc90eeac51b58d1caa92a704a81ca1e24ddbd"}, + {file = "mkdocs_material_extensions-1.3.1-py3-none-any.whl", hash = "sha256:adff8b62700b25cb77b53358dad940f3ef973dd6db797907c49e3c2ef3ab4e31"}, + {file = "mkdocs_material_extensions-1.3.1.tar.gz", hash = "sha256:10c9511cea88f568257f960358a467d12b970e1f7b2c0e5fb2bb48cab1928443"}, ] [[package]] name = "mkdocs-version-annotations" version = "1.0.0" description = "MkDocs plugin to add custom admonitions for documenting version differences" -category = "dev" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -1924,7 +1956,6 @@ files = [ name = "mkdocstrings" version = "0.22.0" description = "Automatic documentation from sources, for MkDocs." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1951,7 +1982,6 @@ python-legacy = ["mkdocstrings-python-legacy (>=0.2.1)"] name = "mkdocstrings-python" version = "1.5.2" description = "A Python handler for mkdocstrings." -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1967,7 +1997,6 @@ mkdocstrings = ">=0.20" name = "mypy-extensions" version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." 
-category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -1979,7 +2008,6 @@ files = [ name = "napalm" version = "4.1.0" description = "Network Automation and Programmability Abstraction Layer with Multivendor support" -category = "main" optional = false python-versions = "*" files = [ @@ -2010,14 +2038,13 @@ typing-extensions = ">=4.3.0" [[package]] name = "nautobot" -version = "2.0.4" +version = "2.1.1" description = "Source of truth and network automation platform." -category = "main" optional = false python-versions = ">=3.8,<3.12" files = [ - {file = "nautobot-2.0.4-py3-none-any.whl", hash = "sha256:78687b72b90b5d7a6e70399bb16183eca2e0c8f25a574ae1b5d6a6ed3b7c01c8"}, - {file = "nautobot-2.0.4.tar.gz", hash = "sha256:3589c83a1563ba4a6553862f8ae5a56ef0efbf4e6574b429a871178eb8f07512"}, + {file = "nautobot-2.1.1-py3-none-any.whl", hash = "sha256:0b1592274bdb89b767266ec6b6837e67c2b82e2d7bf02308d6e2f877cf839731"}, + {file = "nautobot-2.1.1.tar.gz", hash = "sha256:62df1aa1a972396973df002b51f10dac7c76feeb52387fb94c1d10e41a2aa3e5"}, ] [package.dependencies] @@ -2038,7 +2065,7 @@ django-redis = ">=5.3.0,<5.4.0" django-tables2 = ">=2.6.0,<2.7.0" django-taggit = ">=4.0.0,<4.1.0" django-timezone-field = ">=5.1,<5.2" -django-tree-queries = ">=0.15.0,<0.16.0" +django-tree-queries = ">=0.16.1,<0.17.0" django-webserver = ">=1.2.0,<1.3.0" djangorestframework = ">=3.14.0,<3.15.0" drf-react-template-framework = ">=0.0.17,<0.0.18" @@ -2056,7 +2083,7 @@ netutils = ">=1.6.0,<2.0.0" packaging = ">=23.1,<23.2" Pillow = ">=10.0.0,<10.1.0" prometheus-client = ">=0.17.1,<0.18.0" -psycopg2-binary = ">=2.9.6,<2.10.0" +psycopg2-binary = ">=2.9.9,<2.10.0" python-slugify = ">=8.0.1,<8.1.0" pyuwsgi = ">=2.0.21,<2.1.0" PyYAML = ">=6.0,<6.1" @@ -2071,15 +2098,62 @@ napalm = ["napalm (>=4.1.0,<4.2.0)"] remote-storage = ["django-storages (>=1.13.2,<1.14.0)"] sso = ["social-auth-core[openidconnect,saml] (>=4.4.2,<4.5.0)"] +[[package]] +name = "nautobot-plugin-nornir" +version = 
"2.0.0" +description = "Nautobot Nornir plugin." +optional = false +python-versions = ">=3.8,<3.12" +files = [ + {file = "nautobot_plugin_nornir-2.0.0-py3-none-any.whl", hash = "sha256:9789fa5b0ba342687f8692a29ad28b1194c02506fb3ce3d778cf245a492987b0"}, + {file = "nautobot_plugin_nornir-2.0.0.tar.gz", hash = "sha256:24d663868e5f96e13f7caf2033c71acb1296715a9fb84e1aff41742fa583b8ef"}, +] + +[package.dependencies] +netutils = ">=1.6.0" +nornir-nautobot = ">=3.0.0,<4.0.0" + +[package.extras] +nautobot = ["nautobot (>=2.0.0,<3.0.0)"] + +[[package]] +name = "nautobot-ssot" +version = "2.2.0" +description = "Nautobot Single Source of Truth" +optional = false +python-versions = ">=3.8,<3.12" +files = [ + {file = "nautobot_ssot-2.2.0-py3-none-any.whl", hash = "sha256:0abd1139f45c438a9298d341a2a1792cbfbc4381628f16cc9eedf03beb1c063d"}, + {file = "nautobot_ssot-2.2.0.tar.gz", hash = "sha256:2ca0871737d586bcc660e6857e4f446f1d1a7859c13a395570b59288ae4be2c4"}, +] + +[package.dependencies] +diffsync = ">=1.6.0,<2.0.0" +drf-spectacular = "0.26.3" +Markdown = "!=3.3.5" +nautobot = ">=2.0.0,<3.0.0" +packaging = ">=21.3,<24" +prometheus-client = ">=0.17.1,<0.18.0" + +[package.extras] +aci = ["PyYAML (>=6)"] +all = ["Jinja2 (>=2.11.3)", "PyYAML (>=6)", "cloudvision (>=1.9.0,<2.0.0)", "cvprac (>=1.2.2,<2.0.0)", "dnspython (>=2.1.0,<3.0.0)", "ijson (>=2.5.1)", "ipfabric (>=6.0.9,<6.1.0)", "ipfabric-diagrams (>=6.0.2,<6.1.0)", "nautobot-device-lifecycle-mgmt (>=2.0.0,<3.0.0)", "netutils (>=1.0.0,<2.0.0)", "oauthlib (>=3.1.0)", "python-magic (>=0.4.15)", "pytz (>=2019.3)", "requests (>=2.21.0)", "requests-oauthlib (>=1.3.0)", "six (>=1.13.0)"] +aristacv = ["cloudvision (>=1.9.0,<2.0.0)", "cvprac (>=1.2.2,<2.0.0)"] +device42 = ["requests (>=2.21.0)"] +infoblox = ["dnspython (>=2.1.0,<3.0.0)"] +ipfabric = ["httpx (>=0.23.3)", "ipfabric (>=6.0.9,<6.1.0)", "ipfabric-diagrams (>=6.0.2,<6.1.0)", "netutils (>=1.0.0,<2.0.0)"] +nautobot-device-lifecycle-mgmt = ["nautobot-device-lifecycle-mgmt 
(>=2.0.0,<3.0.0)"] +pysnow = ["ijson (>=2.5.1)", "oauthlib (>=3.1.0)", "python-magic (>=0.4.15)", "pytz (>=2019.3)", "requests (>=2.21.0)", "requests-oauthlib (>=1.3.0)", "six (>=1.13.0)"] +servicenow = ["Jinja2 (>=2.11.3)", "PyYAML (>=6)", "ijson (>=2.5.1)", "oauthlib (>=3.1.0)", "python-magic (>=0.4.15)", "pytz (>=2019.3)", "requests (>=2.21.0)", "requests-oauthlib (>=1.3.0)", "six (>=1.13.0)"] + [[package]] name = "ncclient" -version = "0.6.13" +version = "0.6.15" description = "Python library for NETCONF clients" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ - {file = "ncclient-0.6.13.tar.gz", hash = "sha256:f9f8cea8bcbe057e1b948b9cd1b241eafb8a3f73c4981fbdfa1cc6ed69c0a7b3"}, + {file = "ncclient-0.6.15.tar.gz", hash = "sha256:6757cb41bc9160dfe47f22f5de8cf2f1adf22f27463fb50453cc415ab96773d8"}, ] [package.dependencies] @@ -2092,7 +2166,6 @@ six = "*" name = "netaddr" version = "0.8.0" description = "A network address manipulation library for Python" -category = "main" optional = false python-versions = "*" files = [ @@ -2104,7 +2177,6 @@ files = [ name = "netmiko" version = "4.3.0" description = "Multi-vendor library to simplify legacy CLI connections to network devices" -category = "main" optional = false python-versions = ">=3.8,<4.0" files = [ @@ -2124,7 +2196,6 @@ textfsm = ">=1.1.3" name = "netutils" version = "1.6.0" description = "Common helper functions useful in network automation." 
-category = "main" optional = false python-versions = ">=3.8,<4.0" files = [ @@ -2135,16 +2206,115 @@ files = [ [package.extras] optionals = ["jsonschema (>=4.17.3,<5.0.0)", "napalm (>=4.0.0,<5.0.0)"] +[[package]] +name = "nornir" +version = "3.4.1" +description = "Pluggable multi-threaded framework with inventory management to help operate collections of devices" +optional = false +python-versions = ">=3.8,<4.0" +files = [ + {file = "nornir-3.4.1-py3-none-any.whl", hash = "sha256:db079cb95e3baf855530f4f40cb6ee93f93e1bf3cb74ac08180546adb1b987b8"}, + {file = "nornir-3.4.1.tar.gz", hash = "sha256:82a90a3478a3890bef8ad51b256fa966e6e4ca326cbe20a230918ef907cf68c3"}, +] + +[package.dependencies] +importlib-metadata = {version = ">=4,<5", markers = "python_version < \"3.10\""} +mypy_extensions = ">=1.0.0,<2.0.0" +"ruamel.yaml" = ">=0.17" + +[[package]] +name = "nornir-jinja2" +version = "0.2.0" +description = "Jinja2 plugins for nornir" +optional = false +python-versions = ">=3.6,<4.0" +files = [ + {file = "nornir_jinja2-0.2.0-py3-none-any.whl", hash = "sha256:0c446bec7a8492923d4eb9ca00fb327603b41bc35d5f0112843c048737b506b1"}, + {file = "nornir_jinja2-0.2.0.tar.gz", hash = "sha256:9ee5e725fe5543dcba4ec8b976804e9e88ecd356ea3b62bad97578cea0de1f75"}, +] + +[package.dependencies] +jinja2 = ">=2.11.2,<4" +nornir = ">=3,<4" + +[[package]] +name = "nornir-napalm" +version = "0.4.0" +description = "NAPALM's plugins for nornir" +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "nornir_napalm-0.4.0-py3-none-any.whl", hash = "sha256:20a41499aecf9c4e41181b18a73b2ee3ab7763824645ac0eb80abb3973a5f17e"}, + {file = "nornir_napalm-0.4.0.tar.gz", hash = "sha256:84e0711ccbdf24bdb228042ab530bf688d6b2b8f12c65fa3cb73499c6974a9de"}, +] + +[package.dependencies] +napalm = ">=4,<5" +nornir = ">=3,<4" + +[[package]] +name = "nornir-nautobot" +version = "3.1.0" +description = "Nornir Nautobot" +optional = false +python-versions = ">=3.8,<4.0" +files = [ + {file = 
"nornir_nautobot-3.1.0-py3-none-any.whl", hash = "sha256:23197181c17fa6de503679490d04fdc7315133ec5ddc9b549eb0794af9da418f"}, + {file = "nornir_nautobot-3.1.0.tar.gz", hash = "sha256:5bc58d83650fb87aec456358205d455aaa5289345e2bc18f32d6bfa421eec63c"}, +] + +[package.dependencies] +httpx = ">=0.24.1,<0.25.0" +netutils = ">=1.6.0,<2.0.0" +nornir = ">=3.0.0,<4.0.0" +nornir-jinja2 = ">=0.2.0,<0.3.0" +nornir-napalm = ">=0.4.0,<1.0.0" +nornir-netmiko = ">=1,<2" +nornir-utils = ">=0,<1" +pynautobot = ">=2.0.0rc2" +requests = ">=2.25.1,<3.0.0" + +[package.extras] +mikrotik-driver = ["routeros-api (>=0.17.0,<0.18.0)"] + +[[package]] +name = "nornir-netmiko" +version = "1.0.1" +description = "Netmiko's plugins for Nornir" +optional = false +python-versions = ">=3.8,<4.0" +files = [ + {file = "nornir_netmiko-1.0.1-py3-none-any.whl", hash = "sha256:eaee2944ad386b40c0719e8ac393ac63d531f44fb9a07d660bae7de430f12834"}, + {file = "nornir_netmiko-1.0.1.tar.gz", hash = "sha256:498546df001e0e499f10c5646d1356e361ccbb165b1335b89cfe8f19765e24d7"}, +] + +[package.dependencies] +netmiko = ">=4.0.0,<5.0.0" + +[[package]] +name = "nornir-utils" +version = "0.2.0" +description = "Collection of plugins and functions for nornir that don't require external dependencies" +optional = false +python-versions = ">=3.6.2,<4.0.0" +files = [ + {file = "nornir_utils-0.2.0-py3-none-any.whl", hash = "sha256:b4c430793a74f03affd5ff2d90abc8c67a28c7ff325f48e3a01a9a44ec71b844"}, + {file = "nornir_utils-0.2.0.tar.gz", hash = "sha256:4de6aaa35e5c1a98e1c84db84a008b0b1e974dc65d88484f2dcea3e30c95fbc2"}, +] + +[package.dependencies] +colorama = ">=0.4.3,<0.5.0" +nornir = ">=3,<4" + [[package]] name = "ntc-templates" -version = "4.0.1" +version = "4.1.0" description = "TextFSM Templates for Network Devices, and Python wrapper for TextFSM's CliTable." 
-category = "main" optional = false -python-versions = ">=3.7,<4.0" +python-versions = ">=3.8,<4.0" files = [ - {file = "ntc_templates-4.0.1-py3-none-any.whl", hash = "sha256:4d20943fdffc70595fb2b983c6fcab926635c3e4621aaec13a9063a9a61241dd"}, - {file = "ntc_templates-4.0.1.tar.gz", hash = "sha256:5bd158592ac99e769a0b7e82e53fd714a410f912fc9e438e95cc0130cf7290a8"}, + {file = "ntc_templates-4.1.0-py3-none-any.whl", hash = "sha256:61acf390ac22ee87c82c3923ea7cda8b2918f6321973de3b7878beedc2818cb1"}, + {file = "ntc_templates-4.1.0.tar.gz", hash = "sha256:c4985893f347852e1ddbdf8205c098fb23d837185020b4f7f909a547695794df"}, ] [package.dependencies] @@ -2154,7 +2324,6 @@ textfsm = ">=1.1.0,<2.0.0" name = "oauthlib" version = "3.2.2" description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -2171,7 +2340,6 @@ signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] name = "packaging" version = "23.1" description = "Core utilities for Python packages" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2181,14 +2349,13 @@ files = [ [[package]] name = "paramiko" -version = "3.3.1" +version = "3.4.0" description = "SSH2 protocol library" -category = "main" optional = false python-versions = ">=3.6" files = [ - {file = "paramiko-3.3.1-py3-none-any.whl", hash = "sha256:b7bc5340a43de4287bbe22fe6de728aa2c22468b2a849615498dd944c2f275eb"}, - {file = "paramiko-3.3.1.tar.gz", hash = "sha256:6a3777a961ac86dbef375c5f5b8d50014a1a96d0fd7f054a43bc880134b0ff77"}, + {file = "paramiko-3.4.0-py3-none-any.whl", hash = "sha256:43f0b51115a896f9c00f59618023484cb3a14b98bbceab43394a39c6739b7ee7"}, + {file = "paramiko-3.4.0.tar.gz", hash = "sha256:aac08f26a31dc4dffd92821527d1682d99d52f9ef6851968114a8728f3c274d3"}, ] [package.dependencies] @@ -2203,21 +2370,19 @@ invoke = ["invoke (>=2.0)"] [[package]] name = "pathspec" -version = "0.11.2" +version = 
"0.12.1" description = "Utility library for gitignore style pattern matching of file paths." -category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pathspec-0.11.2-py3-none-any.whl", hash = "sha256:1d6ed233af05e679efb96b1851550ea95bbb64b7c490b0f5aa52996c11e92a20"}, - {file = "pathspec-0.11.2.tar.gz", hash = "sha256:e0d8d0ac2f12da61956eb2306b69f9469b42f4deb0f3cb6ed47b9cce9996ced3"}, + {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, + {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, ] [[package]] name = "pbr" version = "6.0.0" description = "Python Build Reasonableness" -category = "dev" optional = false python-versions = ">=2.6" files = [ @@ -2229,7 +2394,6 @@ files = [ name = "pillow" version = "10.0.1" description = "Python Imaging Library (Fork)" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2297,7 +2461,6 @@ tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "pa name = "pkgutil-resolve-name" version = "1.3.10" description = "Resolve a name to an object." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -2307,14 +2470,13 @@ files = [ [[package]] name = "platformdirs" -version = "4.0.0" +version = "4.1.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
-category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "platformdirs-4.0.0-py3-none-any.whl", hash = "sha256:118c954d7e949b35437270383a3f2531e99dd93cf7ce4dc8340d3356d30f173b"}, - {file = "platformdirs-4.0.0.tar.gz", hash = "sha256:cb633b2bcf10c51af60beb0ab06d2f1d69064b43abf4c185ca6b28865f3f9731"}, + {file = "platformdirs-4.1.0-py3-none-any.whl", hash = "sha256:11c8f37bcca40db96d8144522d925583bdb7a31f7b0e37e3ed4318400a8e2380"}, + {file = "platformdirs-4.1.0.tar.gz", hash = "sha256:906d548203468492d432bcb294d4bc2fff751bf84971fbb2c10918cc206ee420"}, ] [package.extras] @@ -2325,7 +2487,6 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-co name = "prometheus-client" version = "0.17.1" description = "Python client for the Prometheus monitoring system." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -2340,7 +2501,6 @@ twisted = ["twisted"] name = "promise" version = "2.3" description = "Promises/A+ implementation for Python" -category = "main" optional = false python-versions = "*" files = [ @@ -2355,14 +2515,13 @@ test = ["coveralls", "futures", "mock", "pytest (>=2.7.3)", "pytest-benchmark", [[package]] name = "prompt-toolkit" -version = "3.0.41" +version = "3.0.43" description = "Library for building powerful interactive command lines in Python" -category = "main" optional = false python-versions = ">=3.7.0" files = [ - {file = "prompt_toolkit-3.0.41-py3-none-any.whl", hash = "sha256:f36fe301fafb7470e86aaf90f036eef600a3210be4decf461a5b1ca8403d3cb2"}, - {file = "prompt_toolkit-3.0.41.tar.gz", hash = "sha256:941367d97fc815548822aa26c2a269fdc4eb21e9ec05fc5d447cf09bad5d75f0"}, + {file = "prompt_toolkit-3.0.43-py3-none-any.whl", hash = "sha256:a11a29cb3bf0a28a387fe5122cdb649816a957cd9261dcedf8c9f1fef33eacf6"}, + {file = "prompt_toolkit-3.0.43.tar.gz", hash = "sha256:3527b7af26106cbc65a040bcc84839a3566ec1b051bb0bfe953631e704b0ff7d"}, ] [package.dependencies] @@ 
-2372,7 +2531,6 @@ wcwidth = "*" name = "psycopg2-binary" version = "2.9.9" description = "psycopg2 - Python-PostgreSQL Database Adapter" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2402,6 +2560,7 @@ files = [ {file = "psycopg2_binary-2.9.9-cp311-cp311-win32.whl", hash = "sha256:dc4926288b2a3e9fd7b50dc6a1909a13bbdadfc67d93f3374d984e56f885579d"}, {file = "psycopg2_binary-2.9.9-cp311-cp311-win_amd64.whl", hash = "sha256:b76bedd166805480ab069612119ea636f5ab8f8771e640ae103e05a4aae3e417"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:8532fd6e6e2dc57bcb3bc90b079c60de896d2128c5d9d6f24a63875a95a088cf"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b0605eaed3eb239e87df0d5e3c6489daae3f7388d455d0c0b4df899519c6a38d"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f8544b092a29a6ddd72f3556a9fcf249ec412e10ad28be6a0c0d948924f2212"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2d423c8d8a3c82d08fe8af900ad5b613ce3632a1249fd6a223941d0735fce493"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e5afae772c00980525f6d6ecf7cbca55676296b580c0e6abb407f15f3706996"}, @@ -2410,6 +2569,8 @@ files = [ {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:cb16c65dcb648d0a43a2521f2f0a2300f40639f6f8c1ecbc662141e4e3e1ee07"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:911dda9c487075abd54e644ccdf5e5c16773470a6a5d3826fda76699410066fb"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:57fede879f08d23c85140a360c6a77709113efd1c993923c59fde17aa27599fe"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-win32.whl", hash = "sha256:64cf30263844fa208851ebb13b0732ce674d8ec6a0c86a4e160495d299ba3c93"}, + {file = 
"psycopg2_binary-2.9.9-cp312-cp312-win_amd64.whl", hash = "sha256:81ff62668af011f9a48787564ab7eded4e9fb17a4a6a74af5ffa6a457400d2ab"}, {file = "psycopg2_binary-2.9.9-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2293b001e319ab0d869d660a704942c9e2cce19745262a8aba2115ef41a0a42a"}, {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03ef7df18daf2c4c07e2695e8cfd5ee7f748a1d54d802330985a78d2a5a6dca9"}, {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a602ea5aff39bb9fac6308e9c9d82b9a35c2bf288e184a816002c9fae930b77"}, @@ -2451,7 +2612,6 @@ files = [ name = "pycodestyle" version = "2.9.1" description = "Python style guide checker" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2463,7 +2623,6 @@ files = [ name = "pycparser" version = "2.21" description = "C parser in Python" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -2471,11 +2630,62 @@ files = [ {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, ] +[[package]] +name = "pydantic" +version = "1.10.14" +description = "Data validation and settings management using python type hints" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pydantic-1.10.14-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7f4fcec873f90537c382840f330b90f4715eebc2bc9925f04cb92de593eae054"}, + {file = "pydantic-1.10.14-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e3a76f571970fcd3c43ad982daf936ae39b3e90b8a2e96c04113a369869dc87"}, + {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82d886bd3c3fbeaa963692ef6b643159ccb4b4cefaf7ff1617720cbead04fd1d"}, + {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:798a3d05ee3b71967844a1164fd5bdb8c22c6d674f26274e78b9f29d81770c4e"}, + {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:23d47a4b57a38e8652bcab15a658fdb13c785b9ce217cc3a729504ab4e1d6bc9"}, + {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f9f674b5c3bebc2eba401de64f29948ae1e646ba2735f884d1594c5f675d6f2a"}, + {file = "pydantic-1.10.14-cp310-cp310-win_amd64.whl", hash = "sha256:24a7679fab2e0eeedb5a8924fc4a694b3bcaac7d305aeeac72dd7d4e05ecbebf"}, + {file = "pydantic-1.10.14-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9d578ac4bf7fdf10ce14caba6f734c178379bd35c486c6deb6f49006e1ba78a7"}, + {file = "pydantic-1.10.14-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fa7790e94c60f809c95602a26d906eba01a0abee9cc24150e4ce2189352deb1b"}, + {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aad4e10efa5474ed1a611b6d7f0d130f4aafadceb73c11d9e72823e8f508e663"}, + {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1245f4f61f467cb3dfeced2b119afef3db386aec3d24a22a1de08c65038b255f"}, + {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:21efacc678a11114c765eb52ec0db62edffa89e9a562a94cbf8fa10b5db5c046"}, + {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:412ab4a3f6dbd2bf18aefa9f79c7cca23744846b31f1d6555c2ee2b05a2e14ca"}, + {file = "pydantic-1.10.14-cp311-cp311-win_amd64.whl", hash = "sha256:e897c9f35281f7889873a3e6d6b69aa1447ceb024e8495a5f0d02ecd17742a7f"}, + {file = "pydantic-1.10.14-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d604be0f0b44d473e54fdcb12302495fe0467c56509a2f80483476f3ba92b33c"}, + {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a42c7d17706911199798d4c464b352e640cab4351efe69c2267823d619a937e5"}, + {file = 
"pydantic-1.10.14-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:596f12a1085e38dbda5cbb874d0973303e34227b400b6414782bf205cc14940c"}, + {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bfb113860e9288d0886e3b9e49d9cf4a9d48b441f52ded7d96db7819028514cc"}, + {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bc3ed06ab13660b565eed80887fcfbc0070f0aa0691fbb351657041d3e874efe"}, + {file = "pydantic-1.10.14-cp37-cp37m-win_amd64.whl", hash = "sha256:ad8c2bc677ae5f6dbd3cf92f2c7dc613507eafe8f71719727cbc0a7dec9a8c01"}, + {file = "pydantic-1.10.14-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c37c28449752bb1f47975d22ef2882d70513c546f8f37201e0fec3a97b816eee"}, + {file = "pydantic-1.10.14-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:49a46a0994dd551ec051986806122767cf144b9702e31d47f6d493c336462597"}, + {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53e3819bd20a42470d6dd0fe7fc1c121c92247bca104ce608e609b59bc7a77ee"}, + {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0fbb503bbbbab0c588ed3cd21975a1d0d4163b87e360fec17a792f7d8c4ff29f"}, + {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:336709883c15c050b9c55a63d6c7ff09be883dbc17805d2b063395dd9d9d0022"}, + {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4ae57b4d8e3312d486e2498d42aed3ece7b51848336964e43abbf9671584e67f"}, + {file = "pydantic-1.10.14-cp38-cp38-win_amd64.whl", hash = "sha256:dba49d52500c35cfec0b28aa8b3ea5c37c9df183ffc7210b10ff2a415c125c4a"}, + {file = "pydantic-1.10.14-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c66609e138c31cba607d8e2a7b6a5dc38979a06c900815495b2d90ce6ded35b4"}, + {file = "pydantic-1.10.14-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:d986e115e0b39604b9eee3507987368ff8148222da213cd38c359f6f57b3b347"}, + {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:646b2b12df4295b4c3148850c85bff29ef6d0d9621a8d091e98094871a62e5c7"}, + {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282613a5969c47c83a8710cc8bfd1e70c9223feb76566f74683af889faadc0ea"}, + {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:466669501d08ad8eb3c4fecd991c5e793c4e0bbd62299d05111d4f827cded64f"}, + {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:13e86a19dca96373dcf3190fcb8797d40a6f12f154a244a8d1e8e03b8f280593"}, + {file = "pydantic-1.10.14-cp39-cp39-win_amd64.whl", hash = "sha256:08b6ec0917c30861e3fe71a93be1648a2aa4f62f866142ba21670b24444d7fd8"}, + {file = "pydantic-1.10.14-py3-none-any.whl", hash = "sha256:8ee853cd12ac2ddbf0ecbac1c289f95882b2d4482258048079d13be700aa114c"}, + {file = "pydantic-1.10.14.tar.gz", hash = "sha256:46f17b832fe27de7850896f3afee50ea682220dd218f7e9c88d436788419dca6"}, +] + +[package.dependencies] +typing-extensions = ">=4.2.0" + +[package.extras] +dotenv = ["python-dotenv (>=0.10.4)"] +email = ["email-validator (>=1.0.3)"] + [[package]] name = "pydocstyle" version = "6.3.0" description = "Python docstring style checker" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2493,7 +2703,6 @@ toml = ["tomli (>=1.2.3)"] name = "pyeapi" version = "1.0.2" description = "Python Client for eAPI" -category = "main" optional = false python-versions = "*" files = [ @@ -2511,7 +2720,6 @@ test = ["coverage", "mock"] name = "pyflakes" version = "2.5.0" description = "passive checker of Python programs" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2521,14 +2729,13 @@ files = [ [[package]] name = "pygments" -version = "2.17.1" +version = "2.17.2" description = "Pygments is a syntax 
highlighting package written in Python." -category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "pygments-2.17.1-py3-none-any.whl", hash = "sha256:1b37f1b1e1bff2af52ecaf28cc601e2ef7077000b227a0675da25aef85784bc4"}, - {file = "pygments-2.17.1.tar.gz", hash = "sha256:e45a0e74bf9c530f564ca81b8952343be986a29f6afe7f5ad95c5f06b7bdf5e8"}, + {file = "pygments-2.17.2-py3-none-any.whl", hash = "sha256:b27c2826c47d0f3219f29554824c30c5e8945175d888647acd804ddd04af846c"}, + {file = "pygments-2.17.2.tar.gz", hash = "sha256:da46cec9fd2de5be3a8a784f434e4c4ab670b4ff54d605c4c2717e9d49c4c367"}, ] [package.extras] @@ -2539,7 +2746,6 @@ windows-terminal = ["colorama (>=0.4.6)"] name = "pyjwt" version = "2.8.0" description = "JSON Web Token implementation in Python" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2557,7 +2763,6 @@ tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] name = "pylint" version = "2.17.7" description = "python code static checker" -category = "dev" optional = false python-versions = ">=3.7.2" files = [ @@ -2587,7 +2792,6 @@ testutils = ["gitpython (>3)"] name = "pylint-django" version = "2.5.5" description = "A Pylint plugin to help Pylint understand the Django web framework" -category = "dev" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -2606,7 +2810,6 @@ with-django = ["Django (>=2.2)"] name = "pylint-nautobot" version = "0.2.1" description = "Custom Pylint Rules for Nautobot" -category = "dev" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -2624,7 +2827,6 @@ tomli = ">=2.0.1,<3.0.0" name = "pylint-plugin-utils" version = "0.8.2" description = "Utilities and helpers for writing Pylint plugins" -category = "dev" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -2639,7 +2841,6 @@ pylint = ">=1.7" name = "pymdown-extensions" version = "10.4" description = "Extension pack for Python Markdown." 
-category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2658,7 +2859,6 @@ extra = ["pygments (>=2.12)"] name = "pynacl" version = "1.5.0" description = "Python binding to the Networking and Cryptography (NaCl) library" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -2681,11 +2881,25 @@ cffi = ">=1.4.1" docs = ["sphinx (>=1.6.5)", "sphinx-rtd-theme"] tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"] +[[package]] +name = "pynautobot" +version = "2.0.1" +description = "Nautobot API client library" +optional = false +python-versions = ">=3.8,<4.0" +files = [ + {file = "pynautobot-2.0.1-py3-none-any.whl", hash = "sha256:14f9f05ef4c9f8918a56e4892c3badd3c25679aaf5cc6292adcebd7e1ba419c7"}, + {file = "pynautobot-2.0.1.tar.gz", hash = "sha256:de8bf725570baa5bee3a47e2a0de01605ab97e852e5f534b3d8e54a4ed6e2043"}, +] + +[package.dependencies] +requests = ">=2.30.0,<3.0.0" +urllib3 = ">=1.21.1,<1.27" + [[package]] name = "pyparsing" version = "3.1.1" description = "pyparsing module - Classes and methods to define and execute parsing grammars" -category = "main" optional = false python-versions = ">=3.6.8" files = [ @@ -2700,7 +2914,6 @@ diagrams = ["jinja2", "railroad-diagrams"] name = "pyserial" version = "3.5" description = "Python Serial Port Extension" -category = "main" optional = false python-versions = "*" files = [ @@ -2715,7 +2928,6 @@ cp2110 = ["hidapi"] name = "python-crontab" version = "3.0.0" description = "Python Crontab API" -category = "main" optional = false python-versions = "*" files = [ @@ -2734,7 +2946,6 @@ cron-schedule = ["croniter"] name = "python-dateutil" version = "2.8.2" description = "Extensions to the standard Python datetime module" -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ @@ -2749,7 +2960,6 @@ six = ">=1.5" name = "python-slugify" version = "8.0.1" description = "A Python slugify application that also handles Unicode" -category = "main" 
optional = false python-versions = ">=3.7" files = [ @@ -2767,7 +2977,6 @@ unidecode = ["Unidecode (>=1.1.1)"] name = "python3-openid" version = "3.2.0" description = "OpenID support for modern servers and consumers." -category = "main" optional = false python-versions = "*" files = [ @@ -2786,7 +2995,6 @@ postgresql = ["psycopg2"] name = "pytz" version = "2023.3.post1" description = "World timezone definitions, modern and historical" -category = "main" optional = false python-versions = "*" files = [ @@ -2796,62 +3004,60 @@ files = [ [[package]] name = "pyuwsgi" -version = "2.0.23" +version = "2.0.23.post0" description = "The uWSGI server" -category = "main" optional = false python-versions = "*" files = [ - {file = "pyuwsgi-2.0.23-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0bb538ef57960389d67bcd4a9e7ebb562ed13a4556a5596305ce5361e121fc4e"}, - {file = "pyuwsgi-2.0.23-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:efd9689290c3b4afec7d28f1c43ec60f9ee905abf66a501584454cbf6b620678"}, - {file = "pyuwsgi-2.0.23-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:80e6fd3a9f49fad9404dd2622116db16990dd9c5061461fd700a82b429f0ee2b"}, - {file = "pyuwsgi-2.0.23-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4796cb1d35eff2cdae6ea01ffb26d2ec0ddf5c692d9f4bf5a28cab61baf78f4"}, - {file = "pyuwsgi-2.0.23-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:366dbc57eaee7b37f3e1c4039fcd7ba2a5693579e17ba07704038ffa28a8be57"}, - {file = "pyuwsgi-2.0.23-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:40ddfcb7d972cac169e62253027f932bb047a995cfbe98398c1451b137e3cf8d"}, - {file = "pyuwsgi-2.0.23-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f4dc785d94878088fd2b4b6da7a630b5538d461b92b6a767cb56401dac1373b9"}, - {file = "pyuwsgi-2.0.23-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cbde1da759d1486d6b20938b8f03b84b4dfe4a1b7ba111c586b1eaed6cd85cdc"}, - {file = 
"pyuwsgi-2.0.23-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:12568dbacacd02b22791b352c3e93a9307d565512a851b36483ffe4db69b711e"}, - {file = "pyuwsgi-2.0.23-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c9db7b77bf6ee429da0583f36f168bcf1294195d7a4ac53b53d1f5d8ac8c2717"}, - {file = "pyuwsgi-2.0.23-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1de2f99dc4642aea7226889c76083884260920adc14a4a533660479941c6e6f2"}, - {file = "pyuwsgi-2.0.23-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fbad05b405630ddaaf8010822fc8bc553551bcf691df2d1ffbfd4d2204f9973f"}, - {file = "pyuwsgi-2.0.23-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:39107b8abaf488e890d53372bef7b80fdf350b703bbfa2f4ded1002eea31b198"}, - {file = "pyuwsgi-2.0.23-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:feb783ef451dc09cd37b2376ccc9e8ff28d3296542df0351e0a4502c8fac765c"}, - {file = "pyuwsgi-2.0.23-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ea11e270161e5cc8f6935778841f30e3226b0ee3b70185d88d8fa2bf0317bdc9"}, - {file = "pyuwsgi-2.0.23-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3608203a37ebf5580f3fc4901ae1295fd181caa7ec49d29b7dcc1864725049e"}, - {file = "pyuwsgi-2.0.23-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bf9d22dd5397a80cf91242f173c4bab0104c7c8b17d286b289a9582a30643cac"}, - {file = "pyuwsgi-2.0.23-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6505cb52b25eecf81338b9f17f4b47ec6288f3911eb65a5a9f3be03ed2ba0b97"}, - {file = "pyuwsgi-2.0.23-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:563270210d79a9e1a76ead34dec40b0ddf1491ac44e02e9d9fd41f8e08938f07"}, - {file = "pyuwsgi-2.0.23-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:1883c08aa902dbeb7bd70c5ea319452ecbce49adc715ece4c4bef8c0acfb8523"}, - {file = "pyuwsgi-2.0.23-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:79641c8fccc507288b58805c0edb0540713b9fb65d445d703329606a3fbc2fab"}, - {file = "pyuwsgi-2.0.23-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:02a21ce1175599d0e9d63dc3bb576f7662e1ba3412b746bd9780708f55b35587"}, - {file = "pyuwsgi-2.0.23-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d72e622517522df0e8e04fc1f2aff0d1cafeececc44eecf6f83646f405ef474f"}, - {file = "pyuwsgi-2.0.23-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:58cb2c48bfb34b73f5a7586c55d2e29e927a7ca6ca45153e9d860d380f4d6ef1"}, - {file = "pyuwsgi-2.0.23-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7684b4c97bb0d52f3e53f5f67a39241ed1ea234e4a8c351a7ea4a4cfd397909"}, - {file = "pyuwsgi-2.0.23-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:4983d2f201d14bf7ed6ec2f6e9449e046440476877e55b1cf6f165d2eb6d3cf4"}, - {file = "pyuwsgi-2.0.23-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:462dccd00ad01a33744a7c061fa2080b58e6b4c0f25cb95e8f9628a42d10f04f"}, - {file = "pyuwsgi-2.0.23-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:764e833b890a82cf94f60087147bd98d8d8769e133e1c1289cd7b8af4d4e19ee"}, - {file = "pyuwsgi-2.0.23-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:120ae908df0b006d1e88b43a3dfbb2f02212ac768d75baefc2a20cdf1b279b11"}, - {file = "pyuwsgi-2.0.23-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78d8ab2ac544a80bfb57a3019f1768e2ca327993f3a2e39aee92b0a70746f0bb"}, - {file = "pyuwsgi-2.0.23-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e1e3151b5495e3b1882b07a72e040e7a0422e8e5e58ceafc4cc046428c781f86"}, - {file = "pyuwsgi-2.0.23-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56ba238ccf4e12de0bba0ee7d92e316e3acda22419e3403dc0d474420baf3d71"}, - {file = "pyuwsgi-2.0.23-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:173709af71a86d9efee16a702933fee2ee3e6ac6b7f80eee86414bab0c80838a"}, - {file = 
"pyuwsgi-2.0.23-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a35ab28beba766f89c7a0db6a6a0fcedb72d7c9ff3262f3f27418bf5b757602e"}, - {file = "pyuwsgi-2.0.23-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0fd1f679c4597641bb30887e9180c42dfabf4b3e7e2747425f4468fe93a17e51"}, - {file = "pyuwsgi-2.0.23-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7c8eca007320f91f4009eca578e3014a443e7f7b33dabb2454754971fd5df4c0"}, - {file = "pyuwsgi-2.0.23-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cebebc9a322f3d5caf19938114d66ff341852756511f99f1892fbc684120501"}, - {file = "pyuwsgi-2.0.23-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c8f2311699e2562670e3ce979bbb566302e7951e758ee80f77a42f1e13a2e221"}, - {file = "pyuwsgi-2.0.23-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf687febfb7f1cfcbedb07762f39279df8725e9e681a859448ee1c1e79a39180"}, - {file = "pyuwsgi-2.0.23-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b27a7dd26e134c134ba0ed17bc28209eb459709480bdc773ce7da5ecc016c81a"}, - {file = "pyuwsgi-2.0.23-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:447a2a72e4285a1617154c496005fbaf1fbf5b3cf6e81186a13e3627ed7b0994"}, - {file = "pyuwsgi-2.0.23-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a13932ba8079b627d6233c8a04c1544bbe2a9007ddeed7f68f46401b1d0c5d5d"}, - {file = "pyuwsgi-2.0.23.tar.gz", hash = "sha256:74ac3e9c641969a3073c67793773a73bd7968ddcc3fa810c5396415e80cc0df1"}, + {file = "pyuwsgi-2.0.23.post0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:49dfe43726f4a71d3440f7a36eb3ba5b361e04807164d34ececda138e2dc2375"}, + {file = "pyuwsgi-2.0.23.post0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65420b185003dd5b66f41a6d1aa03d63d953a18e818bd4a013fc8e9d580f11cb"}, + {file = "pyuwsgi-2.0.23.post0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7bc7c60d8e1242b3a638754d2487c505112c642010c460442993be85f3ca9ec7"}, + {file = 
"pyuwsgi-2.0.23.post0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1ae2abaa47cb9c0018c790935897aec8001fb709dfac54286a37ab2e0b88dca"}, + {file = "pyuwsgi-2.0.23.post0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:af376cafca1501b2d4b8184c427c55b32c1a3dcb6070dc27115ca552898c7ff8"}, + {file = "pyuwsgi-2.0.23.post0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:f56a729808ed7aa1d7973d6f900a75bc36b976b7ab6c8867064f36e34cdafd4e"}, + {file = "pyuwsgi-2.0.23.post0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4270e68bb2633b0fc132aad6d415e4e0cde67093a97e64dd84bd186264a8c083"}, + {file = "pyuwsgi-2.0.23.post0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:97c940a69242dc45658dba3330e64d809f34e33d9631547b6928fd20075b4bb9"}, + {file = "pyuwsgi-2.0.23.post0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8cac396c2e8e0d199bde9bb8fc90538c82207d0c3d722d08b9a63619b41945d6"}, + {file = "pyuwsgi-2.0.23.post0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:59d6a718ad42be54b2b80c8c236b728b8b83fb93438786e95f63fc259229ccd7"}, + {file = "pyuwsgi-2.0.23.post0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c38b5bb59e1bf59030f2d43a3e67aa18e6089c8e7f43e9c5f2099567466d35f4"}, + {file = "pyuwsgi-2.0.23.post0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7199009447770812056a5b417c4847bd44db1b0230d4bb64c48a4ffacd4e96f0"}, + {file = "pyuwsgi-2.0.23.post0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:f361d168cf175796fe36ab6a88dee079245a2f08e587e8190a38bd1b33238fa8"}, + {file = "pyuwsgi-2.0.23.post0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:52a45e98fe746ae9c9437c5b6f0cdb6117f979c8800f09c8e4dae2997786affd"}, + {file = "pyuwsgi-2.0.23.post0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f7455976abfa1dd43b5f3376f7f04a925c16babba1c3fc6edcdd81f5c0f24383"}, + {file = 
"pyuwsgi-2.0.23.post0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:508f5d84cd677cecc640d0e321badc61080c40c61843cd130b32f356729a599f"}, + {file = "pyuwsgi-2.0.23.post0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dcf93afec49f5cf29b0a68f4d2fb3e44a3ad1f205704ab2f41f9db47dacb8e13"}, + {file = "pyuwsgi-2.0.23.post0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a19ab0d5c43bc179a70cb079feb7804e39be6326bf98ec38808fcea5e7d44bd0"}, + {file = "pyuwsgi-2.0.23.post0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:8c5283e38c4fd3130cd7384d57535d60435c63b81a41a6463f26f340efeda9de"}, + {file = "pyuwsgi-2.0.23.post0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:0d9dfb79bffa552e5985385bc114ecec1d4079b95ce24796f577ef0df727da06"}, + {file = "pyuwsgi-2.0.23.post0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b531ac80155b6c839215d05f95569b34e614e97aab055072c74112b1d2a45546"}, + {file = "pyuwsgi-2.0.23.post0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:eae183104f3fa26f3d9c28fe75f2ad914e3a365103a6a66e329c0f59f9e461d4"}, + {file = "pyuwsgi-2.0.23.post0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a34ab2863ff0120c6e0e75c63c9ced462bfb4777e6b8237e4e1df60fb34af51"}, + {file = "pyuwsgi-2.0.23.post0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fc18481f336be63e80fc983aaa1a040e7c69c25c3145edcf93f0e6de2f1ad0d6"}, + {file = "pyuwsgi-2.0.23.post0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:245da016b424c261d148bbb83d2407aac77e6d5793cbd4e23a17f7e3a8aa061f"}, + {file = "pyuwsgi-2.0.23.post0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:8de1d975be958cff9122ecc82bf393bf7f41fff6f1047e76ed972047763bbd31"}, + {file = "pyuwsgi-2.0.23.post0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:d75859311605a510a6050ec622ec4beb9f2f8cce5f090e5cea70a1ff74133f8b"}, + {file = 
"pyuwsgi-2.0.23.post0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d3ad00212ffbb208b7146744ad3710b908734f844b5e2bf533fb09fc44726f37"}, + {file = "pyuwsgi-2.0.23.post0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:374142b106de187c4572b4441a367fa3466d9ea5aaabe475da42bb9f2202a690"}, + {file = "pyuwsgi-2.0.23.post0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:137db348bd5f585e8e5a609046d3ac9ef58483bba93de1e3c568c1a860c31b9c"}, + {file = "pyuwsgi-2.0.23.post0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:52b7a837dbc8702b245481514a32c88418a42df7b5ee68d45695eba457abd3ee"}, + {file = "pyuwsgi-2.0.23.post0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cfcfeb1eaca5f4dd0e6ed9194e7ec98dcb3a8ac108e8f0414ed7c28d608517ef"}, + {file = "pyuwsgi-2.0.23.post0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7887c2acc8262223ff9cdce974851da0917818c12ef3ec0f49ec11a9943731fe"}, + {file = "pyuwsgi-2.0.23.post0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:bae72689ddf8e0bdd1a974a364ed052dd19d7897f1d5c3efcf8d9010c60f56ef"}, + {file = "pyuwsgi-2.0.23.post0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:9565569474f9e9f02f6fa490d96d8c5c7e3004829c01c0446cdb74c618b6a433"}, + {file = "pyuwsgi-2.0.23.post0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6ba86c6aa815635eefe7728b9b219af281a4e956bab240c5871db6c151c300a8"}, + {file = "pyuwsgi-2.0.23.post0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29ab8a02e812fbc34026ddb79f274a574c96fc488f384f320d3af37bd7edf932"}, + {file = "pyuwsgi-2.0.23.post0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f4f9c0694a11d8dfbbe2814b8b242a7c4dfa143b63e01447fabce9966a90fa60"}, + {file = "pyuwsgi-2.0.23.post0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f75e45e14462cbb94fc32242378eef7bda97173de57a68a5d46e4053677a7547"}, + {file = 
"pyuwsgi-2.0.23.post0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e7140fc3548cd9d0f02c4511b679ba47d26593d2cceb249d2d147c9901d90022"}, + {file = "pyuwsgi-2.0.23.post0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ed348cc4c5a4964c8e8fa61ab0ef50c00f7676179a6c0cb0f55f0122db1db1c2"}, + {file = "pyuwsgi-2.0.23.post0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:17a8818ec98f92e7935cf0ff56ed4f02a069362e10554df969f70fcdf78d9199"}, + {file = "pyuwsgi-2.0.23.post0.tar.gz", hash = "sha256:04ec79c4a3acad21002ebf1479050c3208605d27cc6659008df51092951eeb8e"}, ] [[package]] name = "pyyaml" version = "6.0.1" description = "YAML parser and emitter for Python" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -2860,6 +3066,7 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -2867,8 +3074,15 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = 
"PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -2885,6 +3099,7 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -2892,6 +3107,7 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = 
"sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -2901,7 +3117,6 @@ files = [ name = "pyyaml-env-tag" version = "0.1" description = "A custom YAML tag for referencing environment variables in YAML files. " -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2916,7 +3131,6 @@ pyyaml = "*" name = "redis" version = "5.0.1" description = "Python client for Redis database and key-value store" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2933,14 +3147,13 @@ ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==20.0.1)", "requests (>=2.26.0)" [[package]] name = "referencing" -version = "0.31.0" +version = "0.32.1" description = "JSON Referencing + Python" -category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "referencing-0.31.0-py3-none-any.whl", hash = "sha256:381b11e53dd93babb55696c71cf42aef2d36b8a150c49bf0bc301e36d536c882"}, - {file = "referencing-0.31.0.tar.gz", hash = "sha256:cc28f2c88fbe7b961a7817a0abc034c09a1e36358f82fedb4ffdf29a25398863"}, + {file = "referencing-0.32.1-py3-none-any.whl", hash = "sha256:7e4dc12271d8e15612bfe35792f5ea1c40970dadf8624602e33db2758f7ee554"}, + {file = "referencing-0.32.1.tar.gz", hash = "sha256:3c57da0513e9563eb7e203ebe9bb3a1b509b042016433bd1e45a2853466c3dd3"}, ] [package.dependencies] @@ -2949,107 +3162,110 @@ rpds-py = ">=0.7.0" [[package]] name = "regex" -version = "2023.10.3" +version = "2023.12.25" description = "Alternative regular expression module, to replace re." 
-category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "regex-2023.10.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4c34d4f73ea738223a094d8e0ffd6d2c1a1b4c175da34d6b0de3d8d69bee6bcc"}, - {file = "regex-2023.10.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a8f4e49fc3ce020f65411432183e6775f24e02dff617281094ba6ab079ef0915"}, - {file = "regex-2023.10.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4cd1bccf99d3ef1ab6ba835308ad85be040e6a11b0977ef7ea8c8005f01a3c29"}, - {file = "regex-2023.10.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:81dce2ddc9f6e8f543d94b05d56e70d03a0774d32f6cca53e978dc01e4fc75b8"}, - {file = "regex-2023.10.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c6b4d23c04831e3ab61717a707a5d763b300213db49ca680edf8bf13ab5d91b"}, - {file = "regex-2023.10.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c15ad0aee158a15e17e0495e1e18741573d04eb6da06d8b84af726cfc1ed02ee"}, - {file = "regex-2023.10.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6239d4e2e0b52c8bd38c51b760cd870069f0bdf99700a62cd509d7a031749a55"}, - {file = "regex-2023.10.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4a8bf76e3182797c6b1afa5b822d1d5802ff30284abe4599e1247be4fd6b03be"}, - {file = "regex-2023.10.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d9c727bbcf0065cbb20f39d2b4f932f8fa1631c3e01fcedc979bd4f51fe051c5"}, - {file = "regex-2023.10.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:3ccf2716add72f80714b9a63899b67fa711b654be3fcdd34fa391d2d274ce767"}, - {file = "regex-2023.10.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:107ac60d1bfdc3edb53be75e2a52aff7481b92817cfdddd9b4519ccf0e54a6ff"}, - {file = "regex-2023.10.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = 
"sha256:00ba3c9818e33f1fa974693fb55d24cdc8ebafcb2e4207680669d8f8d7cca79a"}, - {file = "regex-2023.10.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f0a47efb1dbef13af9c9a54a94a0b814902e547b7f21acb29434504d18f36e3a"}, - {file = "regex-2023.10.3-cp310-cp310-win32.whl", hash = "sha256:36362386b813fa6c9146da6149a001b7bd063dabc4d49522a1f7aa65b725c7ec"}, - {file = "regex-2023.10.3-cp310-cp310-win_amd64.whl", hash = "sha256:c65a3b5330b54103e7d21cac3f6bf3900d46f6d50138d73343d9e5b2900b2353"}, - {file = "regex-2023.10.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:90a79bce019c442604662d17bf69df99090e24cdc6ad95b18b6725c2988a490e"}, - {file = "regex-2023.10.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c7964c2183c3e6cce3f497e3a9f49d182e969f2dc3aeeadfa18945ff7bdd7051"}, - {file = "regex-2023.10.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ef80829117a8061f974b2fda8ec799717242353bff55f8a29411794d635d964"}, - {file = "regex-2023.10.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5addc9d0209a9afca5fc070f93b726bf7003bd63a427f65ef797a931782e7edc"}, - {file = "regex-2023.10.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c148bec483cc4b421562b4bcedb8e28a3b84fcc8f0aa4418e10898f3c2c0eb9b"}, - {file = "regex-2023.10.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d1f21af4c1539051049796a0f50aa342f9a27cde57318f2fc41ed50b0dbc4ac"}, - {file = "regex-2023.10.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0b9ac09853b2a3e0d0082104036579809679e7715671cfbf89d83c1cb2a30f58"}, - {file = "regex-2023.10.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ebedc192abbc7fd13c5ee800e83a6df252bec691eb2c4bedc9f8b2e2903f5e2a"}, - {file = "regex-2023.10.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:d8a993c0a0ffd5f2d3bda23d0cd75e7086736f8f8268de8a82fbc4bd0ac6791e"}, - {file = 
"regex-2023.10.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:be6b7b8d42d3090b6c80793524fa66c57ad7ee3fe9722b258aec6d0672543fd0"}, - {file = "regex-2023.10.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4023e2efc35a30e66e938de5aef42b520c20e7eda7bb5fb12c35e5d09a4c43f6"}, - {file = "regex-2023.10.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0d47840dc05e0ba04fe2e26f15126de7c755496d5a8aae4a08bda4dd8d646c54"}, - {file = "regex-2023.10.3-cp311-cp311-win32.whl", hash = "sha256:9145f092b5d1977ec8c0ab46e7b3381b2fd069957b9862a43bd383e5c01d18c2"}, - {file = "regex-2023.10.3-cp311-cp311-win_amd64.whl", hash = "sha256:b6104f9a46bd8743e4f738afef69b153c4b8b592d35ae46db07fc28ae3d5fb7c"}, - {file = "regex-2023.10.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:bff507ae210371d4b1fe316d03433ac099f184d570a1a611e541923f78f05037"}, - {file = "regex-2023.10.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:be5e22bbb67924dea15039c3282fa4cc6cdfbe0cbbd1c0515f9223186fc2ec5f"}, - {file = "regex-2023.10.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a992f702c9be9c72fa46f01ca6e18d131906a7180950958f766c2aa294d4b41"}, - {file = "regex-2023.10.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7434a61b158be563c1362d9071358f8ab91b8d928728cd2882af060481244c9e"}, - {file = "regex-2023.10.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c2169b2dcabf4e608416f7f9468737583ce5f0a6e8677c4efbf795ce81109d7c"}, - {file = "regex-2023.10.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9e908ef5889cda4de038892b9accc36d33d72fb3e12c747e2799a0e806ec841"}, - {file = "regex-2023.10.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:12bd4bc2c632742c7ce20db48e0d99afdc05e03f0b4c1af90542e05b809a03d9"}, - {file = "regex-2023.10.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = 
"sha256:bc72c231f5449d86d6c7d9cc7cd819b6eb30134bb770b8cfdc0765e48ef9c420"}, - {file = "regex-2023.10.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bce8814b076f0ce5766dc87d5a056b0e9437b8e0cd351b9a6c4e1134a7dfbda9"}, - {file = "regex-2023.10.3-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:ba7cd6dc4d585ea544c1412019921570ebd8a597fabf475acc4528210d7c4a6f"}, - {file = "regex-2023.10.3-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b0c7d2f698e83f15228ba41c135501cfe7d5740181d5903e250e47f617eb4292"}, - {file = "regex-2023.10.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5a8f91c64f390ecee09ff793319f30a0f32492e99f5dc1c72bc361f23ccd0a9a"}, - {file = "regex-2023.10.3-cp312-cp312-win32.whl", hash = "sha256:ad08a69728ff3c79866d729b095872afe1e0557251da4abb2c5faff15a91d19a"}, - {file = "regex-2023.10.3-cp312-cp312-win_amd64.whl", hash = "sha256:39cdf8d141d6d44e8d5a12a8569d5a227f645c87df4f92179bd06e2e2705e76b"}, - {file = "regex-2023.10.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4a3ee019a9befe84fa3e917a2dd378807e423d013377a884c1970a3c2792d293"}, - {file = "regex-2023.10.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76066d7ff61ba6bf3cb5efe2428fc82aac91802844c022d849a1f0f53820502d"}, - {file = "regex-2023.10.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bfe50b61bab1b1ec260fa7cd91106fa9fece57e6beba05630afe27c71259c59b"}, - {file = "regex-2023.10.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fd88f373cb71e6b59b7fa597e47e518282455c2734fd4306a05ca219a1991b0"}, - {file = "regex-2023.10.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3ab05a182c7937fb374f7e946f04fb23a0c0699c0450e9fb02ef567412d2fa3"}, - {file = "regex-2023.10.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dac37cf08fcf2094159922edc7a2784cfcc5c70f8354469f79ed085f0328ebdf"}, - {file = 
"regex-2023.10.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e54ddd0bb8fb626aa1f9ba7b36629564544954fff9669b15da3610c22b9a0991"}, - {file = "regex-2023.10.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:3367007ad1951fde612bf65b0dffc8fd681a4ab98ac86957d16491400d661302"}, - {file = "regex-2023.10.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:16f8740eb6dbacc7113e3097b0a36065a02e37b47c936b551805d40340fb9971"}, - {file = "regex-2023.10.3-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:f4f2ca6df64cbdd27f27b34f35adb640b5d2d77264228554e68deda54456eb11"}, - {file = "regex-2023.10.3-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:39807cbcbe406efca2a233884e169d056c35aa7e9f343d4e78665246a332f597"}, - {file = "regex-2023.10.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:7eece6fbd3eae4a92d7c748ae825cbc1ee41a89bb1c3db05b5578ed3cfcfd7cb"}, - {file = "regex-2023.10.3-cp37-cp37m-win32.whl", hash = "sha256:ce615c92d90df8373d9e13acddd154152645c0dc060871abf6bd43809673d20a"}, - {file = "regex-2023.10.3-cp37-cp37m-win_amd64.whl", hash = "sha256:0f649fa32fe734c4abdfd4edbb8381c74abf5f34bc0b3271ce687b23729299ed"}, - {file = "regex-2023.10.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9b98b7681a9437262947f41c7fac567c7e1f6eddd94b0483596d320092004533"}, - {file = "regex-2023.10.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:91dc1d531f80c862441d7b66c4505cd6ea9d312f01fb2f4654f40c6fdf5cc37a"}, - {file = "regex-2023.10.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82fcc1f1cc3ff1ab8a57ba619b149b907072e750815c5ba63e7aa2e1163384a4"}, - {file = "regex-2023.10.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7979b834ec7a33aafae34a90aad9f914c41fd6eaa8474e66953f3f6f7cbd4368"}, - {file = "regex-2023.10.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:ef71561f82a89af6cfcbee47f0fabfdb6e63788a9258e913955d89fdd96902ab"}, - {file = "regex-2023.10.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd829712de97753367153ed84f2de752b86cd1f7a88b55a3a775eb52eafe8a94"}, - {file = "regex-2023.10.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:00e871d83a45eee2f8688d7e6849609c2ca2a04a6d48fba3dff4deef35d14f07"}, - {file = "regex-2023.10.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:706e7b739fdd17cb89e1fbf712d9dc21311fc2333f6d435eac2d4ee81985098c"}, - {file = "regex-2023.10.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:cc3f1c053b73f20c7ad88b0d1d23be7e7b3901229ce89f5000a8399746a6e039"}, - {file = "regex-2023.10.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6f85739e80d13644b981a88f529d79c5bdf646b460ba190bffcaf6d57b2a9863"}, - {file = "regex-2023.10.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:741ba2f511cc9626b7561a440f87d658aabb3d6b744a86a3c025f866b4d19e7f"}, - {file = "regex-2023.10.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:e77c90ab5997e85901da85131fd36acd0ed2221368199b65f0d11bca44549711"}, - {file = "regex-2023.10.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:979c24cbefaf2420c4e377ecd1f165ea08cc3d1fbb44bdc51bccbbf7c66a2cb4"}, - {file = "regex-2023.10.3-cp38-cp38-win32.whl", hash = "sha256:58837f9d221744d4c92d2cf7201c6acd19623b50c643b56992cbd2b745485d3d"}, - {file = "regex-2023.10.3-cp38-cp38-win_amd64.whl", hash = "sha256:c55853684fe08d4897c37dfc5faeff70607a5f1806c8be148f1695be4a63414b"}, - {file = "regex-2023.10.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2c54e23836650bdf2c18222c87f6f840d4943944146ca479858404fedeb9f9af"}, - {file = "regex-2023.10.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:69c0771ca5653c7d4b65203cbfc5e66db9375f1078689459fe196fe08b7b4930"}, - {file = 
"regex-2023.10.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ac965a998e1388e6ff2e9781f499ad1eaa41e962a40d11c7823c9952c77123e"}, - {file = "regex-2023.10.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1c0e8fae5b27caa34177bdfa5a960c46ff2f78ee2d45c6db15ae3f64ecadde14"}, - {file = "regex-2023.10.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6c56c3d47da04f921b73ff9415fbaa939f684d47293f071aa9cbb13c94afc17d"}, - {file = "regex-2023.10.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ef1e014eed78ab650bef9a6a9cbe50b052c0aebe553fb2881e0453717573f52"}, - {file = "regex-2023.10.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d29338556a59423d9ff7b6eb0cb89ead2b0875e08fe522f3e068b955c3e7b59b"}, - {file = "regex-2023.10.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9c6d0ced3c06d0f183b73d3c5920727268d2201aa0fe6d55c60d68c792ff3588"}, - {file = "regex-2023.10.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:994645a46c6a740ee8ce8df7911d4aee458d9b1bc5639bc968226763d07f00fa"}, - {file = "regex-2023.10.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:66e2fe786ef28da2b28e222c89502b2af984858091675044d93cb50e6f46d7af"}, - {file = "regex-2023.10.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:11175910f62b2b8c055f2b089e0fedd694fe2be3941b3e2633653bc51064c528"}, - {file = "regex-2023.10.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:06e9abc0e4c9ab4779c74ad99c3fc10d3967d03114449acc2c2762ad4472b8ca"}, - {file = "regex-2023.10.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:fb02e4257376ae25c6dd95a5aec377f9b18c09be6ebdefa7ad209b9137b73d48"}, - {file = "regex-2023.10.3-cp39-cp39-win32.whl", hash = "sha256:3b2c3502603fab52d7619b882c25a6850b766ebd1b18de3df23b2f939360e1bd"}, - {file = "regex-2023.10.3-cp39-cp39-win_amd64.whl", hash 
= "sha256:adbccd17dcaff65704c856bd29951c58a1bd4b2b0f8ad6b826dbd543fe740988"}, - {file = "regex-2023.10.3.tar.gz", hash = "sha256:3fef4f844d2290ee0ba57addcec17eec9e3df73f10a2748485dfd6a3a188cc0f"}, + {file = "regex-2023.12.25-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0694219a1d54336fd0445ea382d49d36882415c0134ee1e8332afd1529f0baa5"}, + {file = "regex-2023.12.25-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b014333bd0217ad3d54c143de9d4b9a3ca1c5a29a6d0d554952ea071cff0f1f8"}, + {file = "regex-2023.12.25-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d865984b3f71f6d0af64d0d88f5733521698f6c16f445bb09ce746c92c97c586"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e0eabac536b4cc7f57a5f3d095bfa557860ab912f25965e08fe1545e2ed8b4c"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c25a8ad70e716f96e13a637802813f65d8a6760ef48672aa3502f4c24ea8b400"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9b6d73353f777630626f403b0652055ebfe8ff142a44ec2cf18ae470395766e"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9cc99d6946d750eb75827cb53c4371b8b0fe89c733a94b1573c9dd16ea6c9e4"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88d1f7bef20c721359d8675f7d9f8e414ec5003d8f642fdfd8087777ff7f94b5"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cb3fe77aec8f1995611f966d0c656fdce398317f850d0e6e7aebdfe61f40e1cd"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7aa47c2e9ea33a4a2a05f40fcd3ea36d73853a2aae7b4feab6fc85f8bf2c9704"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_i686.whl", hash = 
"sha256:df26481f0c7a3f8739fecb3e81bc9da3fcfae34d6c094563b9d4670b047312e1"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c40281f7d70baf6e0db0c2f7472b31609f5bc2748fe7275ea65a0b4601d9b392"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:d94a1db462d5690ebf6ae86d11c5e420042b9898af5dcf278bd97d6bda065423"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ba1b30765a55acf15dce3f364e4928b80858fa8f979ad41f862358939bdd1f2f"}, + {file = "regex-2023.12.25-cp310-cp310-win32.whl", hash = "sha256:150c39f5b964e4d7dba46a7962a088fbc91f06e606f023ce57bb347a3b2d4630"}, + {file = "regex-2023.12.25-cp310-cp310-win_amd64.whl", hash = "sha256:09da66917262d9481c719599116c7dc0c321ffcec4b1f510c4f8a066f8768105"}, + {file = "regex-2023.12.25-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:1b9d811f72210fa9306aeb88385b8f8bcef0dfbf3873410413c00aa94c56c2b6"}, + {file = "regex-2023.12.25-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d902a43085a308cef32c0d3aea962524b725403fd9373dea18110904003bac97"}, + {file = "regex-2023.12.25-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d166eafc19f4718df38887b2bbe1467a4f74a9830e8605089ea7a30dd4da8887"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7ad32824b7f02bb3c9f80306d405a1d9b7bb89362d68b3c5a9be53836caebdb"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:636ba0a77de609d6510235b7f0e77ec494d2657108f777e8765efc060094c98c"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fda75704357805eb953a3ee15a2b240694a9a514548cd49b3c5124b4e2ad01b"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f72cbae7f6b01591f90814250e636065850c5926751af02bb48da94dfced7baa"}, + {file = 
"regex-2023.12.25-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db2a0b1857f18b11e3b0e54ddfefc96af46b0896fb678c85f63fb8c37518b3e7"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7502534e55c7c36c0978c91ba6f61703faf7ce733715ca48f499d3dbbd7657e0"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e8c7e08bb566de4faaf11984af13f6bcf6a08f327b13631d41d62592681d24fe"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:283fc8eed679758de38fe493b7d7d84a198b558942b03f017b1f94dda8efae80"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:f44dd4d68697559d007462b0a3a1d9acd61d97072b71f6d1968daef26bc744bd"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:67d3ccfc590e5e7197750fcb3a2915b416a53e2de847a728cfa60141054123d4"}, + {file = "regex-2023.12.25-cp311-cp311-win32.whl", hash = "sha256:68191f80a9bad283432385961d9efe09d783bcd36ed35a60fb1ff3f1ec2efe87"}, + {file = "regex-2023.12.25-cp311-cp311-win_amd64.whl", hash = "sha256:7d2af3f6b8419661a0c421584cfe8aaec1c0e435ce7e47ee2a97e344b98f794f"}, + {file = "regex-2023.12.25-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8a0ccf52bb37d1a700375a6b395bff5dd15c50acb745f7db30415bae3c2b0715"}, + {file = "regex-2023.12.25-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c3c4a78615b7762740531c27cf46e2f388d8d727d0c0c739e72048beb26c8a9d"}, + {file = "regex-2023.12.25-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ad83e7545b4ab69216cef4cc47e344d19622e28aabec61574b20257c65466d6a"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7a635871143661feccce3979e1727c4e094f2bdfd3ec4b90dfd4f16f571a87a"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:d498eea3f581fbe1b34b59c697512a8baef88212f92e4c7830fcc1499f5b45a5"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:43f7cd5754d02a56ae4ebb91b33461dc67be8e3e0153f593c509e21d219c5060"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51f4b32f793812714fd5307222a7f77e739b9bc566dc94a18126aba3b92b98a3"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba99d8077424501b9616b43a2d208095746fb1284fc5ba490139651f971d39d9"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4bfc2b16e3ba8850e0e262467275dd4d62f0d045e0e9eda2bc65078c0110a11f"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8c2c19dae8a3eb0ea45a8448356ed561be843b13cbc34b840922ddf565498c1c"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:60080bb3d8617d96f0fb7e19796384cc2467447ef1c491694850ebd3670bc457"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b77e27b79448e34c2c51c09836033056a0547aa360c45eeeb67803da7b0eedaf"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:518440c991f514331f4850a63560321f833979d145d7d81186dbe2f19e27ae3d"}, + {file = "regex-2023.12.25-cp312-cp312-win32.whl", hash = "sha256:e2610e9406d3b0073636a3a2e80db05a02f0c3169b5632022b4e81c0364bcda5"}, + {file = "regex-2023.12.25-cp312-cp312-win_amd64.whl", hash = "sha256:cc37b9aeebab425f11f27e5e9e6cf580be7206c6582a64467a14dda211abc232"}, + {file = "regex-2023.12.25-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:da695d75ac97cb1cd725adac136d25ca687da4536154cdc2815f576e4da11c69"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d126361607b33c4eb7b36debc173bf25d7805847346dd4d99b5499e1fef52bc7"}, + {file = 
"regex-2023.12.25-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4719bb05094d7d8563a450cf8738d2e1061420f79cfcc1fa7f0a44744c4d8f73"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5dd58946bce44b53b06d94aa95560d0b243eb2fe64227cba50017a8d8b3cd3e2"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22a86d9fff2009302c440b9d799ef2fe322416d2d58fc124b926aa89365ec482"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2aae8101919e8aa05ecfe6322b278f41ce2994c4a430303c4cd163fef746e04f"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e692296c4cc2873967771345a876bcfc1c547e8dd695c6b89342488b0ea55cd8"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:263ef5cc10979837f243950637fffb06e8daed7f1ac1e39d5910fd29929e489a"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:d6f7e255e5fa94642a0724e35406e6cb7001c09d476ab5fce002f652b36d0c39"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:88ad44e220e22b63b0f8f81f007e8abbb92874d8ced66f32571ef8beb0643b2b"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:3a17d3ede18f9cedcbe23d2daa8a2cd6f59fe2bf082c567e43083bba3fb00347"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d15b274f9e15b1a0b7a45d2ac86d1f634d983ca40d6b886721626c47a400bf39"}, + {file = "regex-2023.12.25-cp37-cp37m-win32.whl", hash = "sha256:ed19b3a05ae0c97dd8f75a5d8f21f7723a8c33bbc555da6bbe1f96c470139d3c"}, + {file = "regex-2023.12.25-cp37-cp37m-win_amd64.whl", hash = "sha256:a6d1047952c0b8104a1d371f88f4ab62e6275567d4458c1e26e9627ad489b445"}, + {file = "regex-2023.12.25-cp38-cp38-macosx_10_9_universal2.whl", 
hash = "sha256:b43523d7bc2abd757119dbfb38af91b5735eea45537ec6ec3a5ec3f9562a1c53"}, + {file = "regex-2023.12.25-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:efb2d82f33b2212898f1659fb1c2e9ac30493ac41e4d53123da374c3b5541e64"}, + {file = "regex-2023.12.25-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b7fca9205b59c1a3d5031f7e64ed627a1074730a51c2a80e97653e3e9fa0d415"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086dd15e9435b393ae06f96ab69ab2d333f5d65cbe65ca5a3ef0ec9564dfe770"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e81469f7d01efed9b53740aedd26085f20d49da65f9c1f41e822a33992cb1590"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:34e4af5b27232f68042aa40a91c3b9bb4da0eeb31b7632e0091afc4310afe6cb"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9852b76ab558e45b20bf1893b59af64a28bd3820b0c2efc80e0a70a4a3ea51c1"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff100b203092af77d1a5a7abe085b3506b7eaaf9abf65b73b7d6905b6cb76988"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cc038b2d8b1470364b1888a98fd22d616fba2b6309c5b5f181ad4483e0017861"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:094ba386bb5c01e54e14434d4caabf6583334090865b23ef58e0424a6286d3dc"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5cd05d0f57846d8ba4b71d9c00f6f37d6b97d5e5ef8b3c3840426a475c8f70f4"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:9aa1a67bbf0f957bbe096375887b2505f5d8ae16bf04488e8b0f334c36e31360"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_s390x.whl", hash = 
"sha256:98a2636994f943b871786c9e82bfe7883ecdaba2ef5df54e1450fa9869d1f756"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:37f8e93a81fc5e5bd8db7e10e62dc64261bcd88f8d7e6640aaebe9bc180d9ce2"}, + {file = "regex-2023.12.25-cp38-cp38-win32.whl", hash = "sha256:d78bd484930c1da2b9679290a41cdb25cc127d783768a0369d6b449e72f88beb"}, + {file = "regex-2023.12.25-cp38-cp38-win_amd64.whl", hash = "sha256:b521dcecebc5b978b447f0f69b5b7f3840eac454862270406a39837ffae4e697"}, + {file = "regex-2023.12.25-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f7bc09bc9c29ebead055bcba136a67378f03d66bf359e87d0f7c759d6d4ffa31"}, + {file = "regex-2023.12.25-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e14b73607d6231f3cc4622809c196b540a6a44e903bcfad940779c80dffa7be7"}, + {file = "regex-2023.12.25-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9eda5f7a50141291beda3edd00abc2d4a5b16c29c92daf8d5bd76934150f3edc"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc6bb9aa69aacf0f6032c307da718f61a40cf970849e471254e0e91c56ffca95"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:298dc6354d414bc921581be85695d18912bea163a8b23cac9a2562bbcd5088b1"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2f4e475a80ecbd15896a976aa0b386c5525d0ed34d5c600b6d3ebac0a67c7ddf"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:531ac6cf22b53e0696f8e1d56ce2396311254eb806111ddd3922c9d937151dae"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22f3470f7524b6da61e2020672df2f3063676aff444db1daa283c2ea4ed259d6"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = 
"sha256:89723d2112697feaa320c9d351e5f5e7b841e83f8b143dba8e2d2b5f04e10923"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0ecf44ddf9171cd7566ef1768047f6e66975788258b1c6c6ca78098b95cf9a3d"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:905466ad1702ed4acfd67a902af50b8db1feeb9781436372261808df7a2a7bca"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:4558410b7a5607a645e9804a3e9dd509af12fb72b9825b13791a37cd417d73a5"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:7e316026cc1095f2a3e8cc012822c99f413b702eaa2ca5408a513609488cb62f"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3b1de218d5375cd6ac4b5493e0b9f3df2be331e86520f23382f216c137913d20"}, + {file = "regex-2023.12.25-cp39-cp39-win32.whl", hash = "sha256:11a963f8e25ab5c61348d090bf1b07f1953929c13bd2309a0662e9ff680763c9"}, + {file = "regex-2023.12.25-cp39-cp39-win_amd64.whl", hash = "sha256:e693e233ac92ba83a87024e1d32b5f9ab15ca55ddd916d878146f4e3406b5c91"}, + {file = "regex-2023.12.25.tar.gz", hash = "sha256:29171aa128da69afdf4bde412d5bedc335f2ca8fcfe4489038577d05f16181e5"}, ] [[package]] name = "requests" version = "2.31.0" description = "Python HTTP for Humans." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3071,7 +3287,6 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] name = "requests-oauthlib" version = "1.3.1" description = "OAuthlib authentication support for Requests." 
-category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -3090,7 +3305,6 @@ rsa = ["oauthlib[signedtoken] (>=3.0.0)"] name = "rich" version = "13.7.0" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" -category = "dev" optional = false python-versions = ">=3.7.0" files = [ @@ -3108,118 +3322,193 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"] [[package]] name = "rpds-py" -version = "0.13.1" +version = "0.17.1" description = "Python bindings to Rust's persistent data structures (rpds)" -category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "rpds_py-0.13.1-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:83feb0f682d75a09ddc11aa37ba5c07dd9b824b22915207f6176ea458474ff75"}, - {file = "rpds_py-0.13.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fa84bbe22ffa108f91631935c28a623001e335d66e393438258501e618fb0dde"}, - {file = "rpds_py-0.13.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e04f8c76b8d5c70695b4e8f1d0b391d8ef91df00ef488c6c1ffb910176459bc6"}, - {file = "rpds_py-0.13.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:032c242a595629aacace44128f9795110513ad27217b091e834edec2fb09e800"}, - {file = "rpds_py-0.13.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91276caef95556faeb4b8f09fe4439670d3d6206fee78d47ddb6e6de837f0b4d"}, - {file = "rpds_py-0.13.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d22f2cb82e0b40e427a74a93c9a4231335bbc548aed79955dde0b64ea7f88146"}, - {file = "rpds_py-0.13.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63c9e2794329ef070844ff9bfc012004aeddc0468dc26970953709723f76c8a5"}, - {file = "rpds_py-0.13.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c797ea56f36c6f248656f0223b11307fdf4a1886f3555eba371f34152b07677f"}, - {file = 
"rpds_py-0.13.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:82dbcd6463e580bcfb7561cece35046aaabeac5a9ddb775020160b14e6c58a5d"}, - {file = "rpds_py-0.13.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:736817dbbbd030a69a1faf5413a319976c9c8ba8cdcfa98c022d3b6b2e01eca6"}, - {file = "rpds_py-0.13.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:1f36a1e80ef4ed1996445698fd91e0d3e54738bf597c9995118b92da537d7a28"}, - {file = "rpds_py-0.13.1-cp310-none-win32.whl", hash = "sha256:4f13d3f6585bd07657a603780e99beda96a36c86acaba841f131e81393958336"}, - {file = "rpds_py-0.13.1-cp310-none-win_amd64.whl", hash = "sha256:545e94c84575057d3d5c62634611858dac859702b1519b6ffc58eca7fb1adfcf"}, - {file = "rpds_py-0.13.1-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:6bfe72b249264cc1ff2f3629be240d7d2fdc778d9d298087cdec8524c91cd11f"}, - {file = "rpds_py-0.13.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edc91c50e17f5cd945d821f0f1af830522dba0c10267c3aab186dc3dbaab8def"}, - {file = "rpds_py-0.13.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2eca04a365be380ca1f8fa48b334462e19e3382c0bb7386444d8ca43aa01c481"}, - {file = "rpds_py-0.13.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3e3ac5b602fea378243f993d8b707189f9061e55ebb4e56cb9fdef8166060f28"}, - {file = "rpds_py-0.13.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dfb5d2ab183c0efe5e7b8917e4eaa2e837aacafad8a69b89aa6bc81550eed857"}, - {file = "rpds_py-0.13.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d9793d46d3e6522ae58e9321032827c9c0df1e56cbe5d3de965facb311aed6aa"}, - {file = "rpds_py-0.13.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9cd935c0220d012a27c20135c140f9cdcbc6249d5954345c81bfb714071b985c"}, - {file = "rpds_py-0.13.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:37b08df45f02ff1866043b95096cbe91ac99de05936dd09d6611987a82a3306a"}, - {file = "rpds_py-0.13.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ad666a904212aa9a6c77da7dce9d5170008cda76b7776e6731928b3f8a0d40fa"}, - {file = "rpds_py-0.13.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8a6ad8429340e0a4de89353447c6441329def3632e7b2293a7d6e873217d3c2b"}, - {file = "rpds_py-0.13.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:7c40851b659d958c5245c1236e34f0d065cc53dca8d978b49a032c8e0adfda6e"}, - {file = "rpds_py-0.13.1-cp311-none-win32.whl", hash = "sha256:4145172ab59b6c27695db6d78d040795f635cba732cead19c78cede74800949a"}, - {file = "rpds_py-0.13.1-cp311-none-win_amd64.whl", hash = "sha256:46a07a258bda12270de02b34c4884f200f864bba3dcd6e3a37fef36a168b859d"}, - {file = "rpds_py-0.13.1-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:ba4432301ad7eeb1b00848cf46fae0e5fecfd18a8cb5fdcf856c67985f79ecc7"}, - {file = "rpds_py-0.13.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d22e0660de24bd8e9ac82f4230a22a5fe4e397265709289d61d5fb333839ba50"}, - {file = "rpds_py-0.13.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76a8374b294e4ccb39ccaf11d39a0537ed107534139c00b4393ca3b542cc66e5"}, - {file = "rpds_py-0.13.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7d152ec7bb431040af2500e01436c9aa0d993f243346f0594a15755016bf0be1"}, - {file = "rpds_py-0.13.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:74a2044b870df7c9360bb3ce7e12f9ddf8e72e49cd3a353a1528cbf166ad2383"}, - {file = "rpds_py-0.13.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:960e7e460fda2d0af18c75585bbe0c99f90b8f09963844618a621b804f8c3abe"}, - {file = "rpds_py-0.13.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37f79f4f1f06cc96151f4a187528c3fd4a7e1065538a4af9eb68c642365957f7"}, - {file = 
"rpds_py-0.13.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cd4ea56c9542ad0091dfdef3e8572ae7a746e1e91eb56c9e08b8d0808b40f1d1"}, - {file = "rpds_py-0.13.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0290712eb5603a725769b5d857f7cf15cf6ca93dda3128065bbafe6fdb709beb"}, - {file = "rpds_py-0.13.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0b70c1f800059c92479dc94dda41288fd6607f741f9b1b8f89a21a86428f6383"}, - {file = "rpds_py-0.13.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3dd5fb7737224e1497c886fb3ca681c15d9c00c76171f53b3c3cc8d16ccfa7fb"}, - {file = "rpds_py-0.13.1-cp312-none-win32.whl", hash = "sha256:74be3b215a5695690a0f1a9f68b1d1c93f8caad52e23242fcb8ba56aaf060281"}, - {file = "rpds_py-0.13.1-cp312-none-win_amd64.whl", hash = "sha256:f47eef55297799956464efc00c74ae55c48a7b68236856d56183fe1ddf866205"}, - {file = "rpds_py-0.13.1-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:e4a45ba34f904062c63049a760790c6a2fa7a4cc4bd160d8af243b12371aaa05"}, - {file = "rpds_py-0.13.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:20147996376be452cd82cd6c17701daba69a849dc143270fa10fe067bb34562a"}, - {file = "rpds_py-0.13.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42b9535aa22ab023704cfc6533e968f7e420affe802d85e956d8a7b4c0b0b5ea"}, - {file = "rpds_py-0.13.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d4fa1eeb9bea6d9b64ac91ec51ee94cc4fc744955df5be393e1c923c920db2b0"}, - {file = "rpds_py-0.13.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2b2415d5a7b7ee96aa3a54d4775c1fec140476a17ee12353806297e900eaeddc"}, - {file = "rpds_py-0.13.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:577d40a72550eac1386b77b43836151cb61ff6700adacda2ad4d883ca5a0b6f2"}, - {file = "rpds_py-0.13.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af2d1648eb625a460eee07d3e1ea3a4a6e84a1fb3a107f6a8e95ac19f7dcce67"}, - {file 
= "rpds_py-0.13.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5b769396eb358d6b55dbf78f3f7ca631ca1b2fe02136faad5af74f0111b4b6b7"}, - {file = "rpds_py-0.13.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:249c8e0055ca597707d71c5ad85fd2a1c8fdb99386a8c6c257e1b47b67a9bec1"}, - {file = "rpds_py-0.13.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:fe30ef31172bdcf946502a945faad110e8fff88c32c4bec9a593df0280e64d8a"}, - {file = "rpds_py-0.13.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:2647192facf63be9ed2d7a49ceb07efe01dc6cfb083bd2cc53c418437400cb99"}, - {file = "rpds_py-0.13.1-cp38-none-win32.whl", hash = "sha256:4011d5c854aa804c833331d38a2b6f6f2fe58a90c9f615afdb7aa7cf9d31f721"}, - {file = "rpds_py-0.13.1-cp38-none-win_amd64.whl", hash = "sha256:7cfae77da92a20f56cf89739a557b76e5c6edc094f6ad5c090b9e15fbbfcd1a4"}, - {file = "rpds_py-0.13.1-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:e9be1f7c5f9673616f875299339984da9447a40e3aea927750c843d6e5e2e029"}, - {file = "rpds_py-0.13.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:839676475ac2ccd1532d36af3d10d290a2ca149b702ed464131e450a767550df"}, - {file = "rpds_py-0.13.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a90031658805c63fe488f8e9e7a88b260ea121ba3ee9cdabcece9c9ddb50da39"}, - {file = "rpds_py-0.13.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8ba9fbc5d6e36bfeb5292530321cc56c4ef3f98048647fabd8f57543c34174ec"}, - {file = "rpds_py-0.13.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:08832078767545c5ee12561ce980714e1e4c6619b5b1e9a10248de60cddfa1fd"}, - {file = "rpds_py-0.13.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:19f5aa7f5078d35ed8e344bcba40f35bc95f9176dddb33fc4f2084e04289fa63"}, - {file = "rpds_py-0.13.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80080972e1d000ad0341c7cc58b6855c80bd887675f92871221451d13a975072"}, - {file = 
"rpds_py-0.13.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:181ee352691c4434eb1c01802e9daa5edcc1007ff15023a320e2693fed6a661b"}, - {file = "rpds_py-0.13.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:d20da6b4c7aa9ee75ad0730beaba15d65157f5beeaca54a038bb968f92bf3ce3"}, - {file = "rpds_py-0.13.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:faa12a9f34671a30ea6bb027f04ec4e1fb8fa3fb3ed030893e729d4d0f3a9791"}, - {file = "rpds_py-0.13.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7cf241dbb50ea71c2e628ab2a32b5bfcd36e199152fc44e5c1edb0b773f1583e"}, - {file = "rpds_py-0.13.1-cp39-none-win32.whl", hash = "sha256:dab979662da1c9fbb464e310c0b06cb5f1d174d09a462553af78f0bfb3e01920"}, - {file = "rpds_py-0.13.1-cp39-none-win_amd64.whl", hash = "sha256:a2b3c79586636f1fa69a7bd59c87c15fca80c0d34b5c003d57f2f326e5276575"}, - {file = "rpds_py-0.13.1-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:5967fa631d0ed9f8511dede08bc943a9727c949d05d1efac4ac82b2938024fb7"}, - {file = "rpds_py-0.13.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:8308a8d49d1354278d5c068c888a58d7158a419b2e4d87c7839ed3641498790c"}, - {file = "rpds_py-0.13.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0580faeb9def6d0beb7aa666294d5604e569c4e24111ada423cf9936768d95c"}, - {file = "rpds_py-0.13.1-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2da81c1492291c1a90987d76a47c7b2d310661bf7c93a9de0511e27b796a8b46"}, - {file = "rpds_py-0.13.1-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1c9a1dc5e898ce30e2f9c0aa57181cddd4532b22b7780549441d6429d22d3b58"}, - {file = "rpds_py-0.13.1-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f4ae6f423cb7d1c6256b7482025ace2825728f53b7ac58bcd574de6ee9d242c2"}, - {file = "rpds_py-0.13.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:bc3179e0815827cf963e634095ae5715ee73a5af61defbc8d6ca79f1bdae1d1d"}, - {file = "rpds_py-0.13.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0d9f8930092558fd15c9e07198625efb698f7cc00b3dc311c83eeec2540226a8"}, - {file = "rpds_py-0.13.1-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:d1d388d2f5f5a6065cf83c54dd12112b7389095669ff395e632003ae8999c6b8"}, - {file = "rpds_py-0.13.1-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:08b335fb0c45f0a9e2478a9ece6a1bfb00b6f4c4780f9be3cf36479c5d8dd374"}, - {file = "rpds_py-0.13.1-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:d11afdc5992bbd7af60ed5eb519873690d921425299f51d80aa3099ed49f2bcc"}, - {file = "rpds_py-0.13.1-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:8c1f6c8df23be165eb0cb78f305483d00c6827a191e3a38394c658d5b9c80bbd"}, - {file = "rpds_py-0.13.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:528e2afaa56d815d2601b857644aeb395afe7e59212ab0659906dc29ae68d9a6"}, - {file = "rpds_py-0.13.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df2af1180b8eeececf4f819d22cc0668bfadadfd038b19a90bd2fb2ee419ec6f"}, - {file = "rpds_py-0.13.1-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:88956c993a20201744282362e3fd30962a9d86dc4f1dcf2bdb31fab27821b61f"}, - {file = "rpds_py-0.13.1-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee70ee5f4144a45a9e6169000b5b525d82673d5dab9f7587eccc92794814e7ac"}, - {file = "rpds_py-0.13.1-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c5fd099acaee2325f01281a130a39da08d885e4dedf01b84bf156ec2737d78fe"}, - {file = "rpds_py-0.13.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9656a09653b18b80764647d585750df2dff8928e03a706763ab40ec8c4872acc"}, - {file = "rpds_py-0.13.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:7ba239bb37663b2b4cd08e703e79e13321512dccd8e5f0e9451d9e53a6b8509a"}, - {file = "rpds_py-0.13.1-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:3f55ae773abd96b1de25fc5c3fb356f491bd19116f8f854ba705beffc1ddc3c5"}, - {file = "rpds_py-0.13.1-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:f4b15a163448ec79241fb2f1bc5a8ae1a4a304f7a48d948d208a2935b26bf8a5"}, - {file = "rpds_py-0.13.1-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:1a3b2583c86bbfbf417304eeb13400ce7f8725376dc7d3efbf35dc5d7052ad48"}, - {file = "rpds_py-0.13.1-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:f1059ca9a51c936c9a8d46fbc2c9a6b4c15ab3f13a97f1ad32f024b39666ba85"}, - {file = "rpds_py-0.13.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:f55601fb58f92e4f4f1d05d80c24cb77505dc42103ddfd63ddfdc51d3da46fa2"}, - {file = "rpds_py-0.13.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fcfd5f91b882eedf8d9601bd21261d6ce0e61a8c66a7152d1f5df08d3f643ab1"}, - {file = "rpds_py-0.13.1-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6574f619e8734140d96c59bfa8a6a6e7a3336820ccd1bfd95ffa610673b650a2"}, - {file = "rpds_py-0.13.1-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a4b9d3f5c48bbe8d9e3758e498b3c34863f2c9b1ac57a4e6310183740e59c980"}, - {file = "rpds_py-0.13.1-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cdd6f8738e1f1d9df5b1603bb03cb30e442710e5672262b95d0f9fcb4edb0dab"}, - {file = "rpds_py-0.13.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8c2bf286e5d755a075e5e97ba56b3de08cccdad6b323ab0b21cc98875176b03"}, - {file = "rpds_py-0.13.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b3d4b390ee70ca9263b331ccfaf9819ee20e90dfd0201a295e23eb64a005dbef"}, - {file = "rpds_py-0.13.1-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = 
"sha256:db8d0f0ad92f74feb61c4e4a71f1d573ef37c22ef4dc19cab93e501bfdad8cbd"}, - {file = "rpds_py-0.13.1-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:2abd669a39be69cdfe145927c7eb53a875b157740bf1e2d49e9619fc6f43362e"}, - {file = "rpds_py-0.13.1-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:2c173f529666bab8e3f948b74c6d91afa22ea147e6ebae49a48229d9020a47c4"}, - {file = "rpds_py-0.13.1.tar.gz", hash = "sha256:264f3a5906c62b9df3a00ad35f6da1987d321a053895bd85f9d5c708de5c0fbf"}, + {file = "rpds_py-0.17.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:4128980a14ed805e1b91a7ed551250282a8ddf8201a4e9f8f5b7e6225f54170d"}, + {file = "rpds_py-0.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ff1dcb8e8bc2261a088821b2595ef031c91d499a0c1b031c152d43fe0a6ecec8"}, + {file = "rpds_py-0.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d65e6b4f1443048eb7e833c2accb4fa7ee67cc7d54f31b4f0555b474758bee55"}, + {file = "rpds_py-0.17.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a71169d505af63bb4d20d23a8fbd4c6ce272e7bce6cc31f617152aa784436f29"}, + {file = "rpds_py-0.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:436474f17733c7dca0fbf096d36ae65277e8645039df12a0fa52445ca494729d"}, + {file = "rpds_py-0.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:10162fe3f5f47c37ebf6d8ff5a2368508fe22007e3077bf25b9c7d803454d921"}, + {file = "rpds_py-0.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:720215373a280f78a1814becb1312d4e4d1077b1202a56d2b0815e95ccb99ce9"}, + {file = "rpds_py-0.17.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:70fcc6c2906cfa5c6a552ba7ae2ce64b6c32f437d8f3f8eea49925b278a61453"}, + {file = "rpds_py-0.17.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:91e5a8200e65aaac342a791272c564dffcf1281abd635d304d6c4e6b495f29dc"}, + {file = 
"rpds_py-0.17.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:99f567dae93e10be2daaa896e07513dd4bf9c2ecf0576e0533ac36ba3b1d5394"}, + {file = "rpds_py-0.17.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:24e4900a6643f87058a27320f81336d527ccfe503984528edde4bb660c8c8d59"}, + {file = "rpds_py-0.17.1-cp310-none-win32.whl", hash = "sha256:0bfb09bf41fe7c51413f563373e5f537eaa653d7adc4830399d4e9bdc199959d"}, + {file = "rpds_py-0.17.1-cp310-none-win_amd64.whl", hash = "sha256:20de7b7179e2031a04042e85dc463a93a82bc177eeba5ddd13ff746325558aa6"}, + {file = "rpds_py-0.17.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:65dcf105c1943cba45d19207ef51b8bc46d232a381e94dd38719d52d3980015b"}, + {file = "rpds_py-0.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:01f58a7306b64e0a4fe042047dd2b7d411ee82e54240284bab63e325762c1147"}, + {file = "rpds_py-0.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:071bc28c589b86bc6351a339114fb7a029f5cddbaca34103aa573eba7b482382"}, + {file = "rpds_py-0.17.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ae35e8e6801c5ab071b992cb2da958eee76340e6926ec693b5ff7d6381441745"}, + {file = "rpds_py-0.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:149c5cd24f729e3567b56e1795f74577aa3126c14c11e457bec1b1c90d212e38"}, + {file = "rpds_py-0.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e796051f2070f47230c745d0a77a91088fbee2cc0502e9b796b9c6471983718c"}, + {file = "rpds_py-0.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:60e820ee1004327609b28db8307acc27f5f2e9a0b185b2064c5f23e815f248f8"}, + {file = "rpds_py-0.17.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1957a2ab607f9added64478a6982742eb29f109d89d065fa44e01691a20fc20a"}, + {file = "rpds_py-0.17.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:8587fd64c2a91c33cdc39d0cebdaf30e79491cc029a37fcd458ba863f8815383"}, + {file = "rpds_py-0.17.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4dc889a9d8a34758d0fcc9ac86adb97bab3fb7f0c4d29794357eb147536483fd"}, + {file = "rpds_py-0.17.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:2953937f83820376b5979318840f3ee47477d94c17b940fe31d9458d79ae7eea"}, + {file = "rpds_py-0.17.1-cp311-none-win32.whl", hash = "sha256:1bfcad3109c1e5ba3cbe2f421614e70439f72897515a96c462ea657261b96518"}, + {file = "rpds_py-0.17.1-cp311-none-win_amd64.whl", hash = "sha256:99da0a4686ada4ed0f778120a0ea8d066de1a0a92ab0d13ae68492a437db78bf"}, + {file = "rpds_py-0.17.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:1dc29db3900cb1bb40353772417800f29c3d078dbc8024fd64655a04ee3c4bdf"}, + {file = "rpds_py-0.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:82ada4a8ed9e82e443fcef87e22a3eed3654dd3adf6e3b3a0deb70f03e86142a"}, + {file = "rpds_py-0.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d36b2b59e8cc6e576f8f7b671e32f2ff43153f0ad6d0201250a7c07f25d570e"}, + {file = "rpds_py-0.17.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3677fcca7fb728c86a78660c7fb1b07b69b281964673f486ae72860e13f512ad"}, + {file = "rpds_py-0.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:516fb8c77805159e97a689e2f1c80655c7658f5af601c34ffdb916605598cda2"}, + {file = "rpds_py-0.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:df3b6f45ba4515632c5064e35ca7f31d51d13d1479673185ba8f9fefbbed58b9"}, + {file = "rpds_py-0.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a967dd6afda7715d911c25a6ba1517975acd8d1092b2f326718725461a3d33f9"}, + {file = "rpds_py-0.17.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:dbbb95e6fc91ea3102505d111b327004d1c4ce98d56a4a02e82cd451f9f57140"}, + {file = 
"rpds_py-0.17.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:02866e060219514940342a1f84303a1ef7a1dad0ac311792fbbe19b521b489d2"}, + {file = "rpds_py-0.17.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:2528ff96d09f12e638695f3a2e0c609c7b84c6df7c5ae9bfeb9252b6fa686253"}, + {file = "rpds_py-0.17.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:bd345a13ce06e94c753dab52f8e71e5252aec1e4f8022d24d56decd31e1b9b23"}, + {file = "rpds_py-0.17.1-cp312-none-win32.whl", hash = "sha256:2a792b2e1d3038daa83fa474d559acfd6dc1e3650ee93b2662ddc17dbff20ad1"}, + {file = "rpds_py-0.17.1-cp312-none-win_amd64.whl", hash = "sha256:292f7344a3301802e7c25c53792fae7d1593cb0e50964e7bcdcc5cf533d634e3"}, + {file = "rpds_py-0.17.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:8ffe53e1d8ef2520ebcf0c9fec15bb721da59e8ef283b6ff3079613b1e30513d"}, + {file = "rpds_py-0.17.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4341bd7579611cf50e7b20bb8c2e23512a3dc79de987a1f411cb458ab670eb90"}, + {file = "rpds_py-0.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f4eb548daf4836e3b2c662033bfbfc551db58d30fd8fe660314f86bf8510b93"}, + {file = "rpds_py-0.17.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b686f25377f9c006acbac63f61614416a6317133ab7fafe5de5f7dc8a06d42eb"}, + {file = "rpds_py-0.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4e21b76075c01d65d0f0f34302b5a7457d95721d5e0667aea65e5bb3ab415c25"}, + {file = "rpds_py-0.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b86b21b348f7e5485fae740d845c65a880f5d1eda1e063bc59bef92d1f7d0c55"}, + {file = "rpds_py-0.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f175e95a197f6a4059b50757a3dca33b32b61691bdbd22c29e8a8d21d3914cae"}, + {file = "rpds_py-0.17.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1701fc54460ae2e5efc1dd6350eafd7a760f516df8dbe51d4a1c79d69472fbd4"}, + {file 
= "rpds_py-0.17.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:9051e3d2af8f55b42061603e29e744724cb5f65b128a491446cc029b3e2ea896"}, + {file = "rpds_py-0.17.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:7450dbd659fed6dd41d1a7d47ed767e893ba402af8ae664c157c255ec6067fde"}, + {file = "rpds_py-0.17.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:5a024fa96d541fd7edaa0e9d904601c6445e95a729a2900c5aec6555fe921ed6"}, + {file = "rpds_py-0.17.1-cp38-none-win32.whl", hash = "sha256:da1ead63368c04a9bded7904757dfcae01eba0e0f9bc41d3d7f57ebf1c04015a"}, + {file = "rpds_py-0.17.1-cp38-none-win_amd64.whl", hash = "sha256:841320e1841bb53fada91c9725e766bb25009cfd4144e92298db296fb6c894fb"}, + {file = "rpds_py-0.17.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:f6c43b6f97209e370124baf2bf40bb1e8edc25311a158867eb1c3a5d449ebc7a"}, + {file = "rpds_py-0.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5e7d63ec01fe7c76c2dbb7e972fece45acbb8836e72682bde138e7e039906e2c"}, + {file = "rpds_py-0.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81038ff87a4e04c22e1d81f947c6ac46f122e0c80460b9006e6517c4d842a6ec"}, + {file = "rpds_py-0.17.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:810685321f4a304b2b55577c915bece4c4a06dfe38f6e62d9cc1d6ca8ee86b99"}, + {file = "rpds_py-0.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:25f071737dae674ca8937a73d0f43f5a52e92c2d178330b4c0bb6ab05586ffa6"}, + {file = "rpds_py-0.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aa5bfb13f1e89151ade0eb812f7b0d7a4d643406caaad65ce1cbabe0a66d695f"}, + {file = "rpds_py-0.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dfe07308b311a8293a0d5ef4e61411c5c20f682db6b5e73de6c7c8824272c256"}, + {file = "rpds_py-0.17.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a000133a90eea274a6f28adc3084643263b1e7c1a5a66eb0a0a7a36aa757ed74"}, + {file = 
"rpds_py-0.17.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5d0e8a6434a3fbf77d11448c9c25b2f25244226cfbec1a5159947cac5b8c5fa4"}, + {file = "rpds_py-0.17.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:efa767c220d94aa4ac3a6dd3aeb986e9f229eaf5bce92d8b1b3018d06bed3772"}, + {file = "rpds_py-0.17.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:dbc56680ecf585a384fbd93cd42bc82668b77cb525343170a2d86dafaed2a84b"}, + {file = "rpds_py-0.17.1-cp39-none-win32.whl", hash = "sha256:270987bc22e7e5a962b1094953ae901395e8c1e1e83ad016c5cfcfff75a15a3f"}, + {file = "rpds_py-0.17.1-cp39-none-win_amd64.whl", hash = "sha256:2a7b2f2f56a16a6d62e55354dd329d929560442bd92e87397b7a9586a32e3e76"}, + {file = "rpds_py-0.17.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a3264e3e858de4fc601741498215835ff324ff2482fd4e4af61b46512dd7fc83"}, + {file = "rpds_py-0.17.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:f2f3b28b40fddcb6c1f1f6c88c6f3769cd933fa493ceb79da45968a21dccc920"}, + {file = "rpds_py-0.17.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9584f8f52010295a4a417221861df9bea4c72d9632562b6e59b3c7b87a1522b7"}, + {file = "rpds_py-0.17.1-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c64602e8be701c6cfe42064b71c84ce62ce66ddc6422c15463fd8127db3d8066"}, + {file = "rpds_py-0.17.1-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:060f412230d5f19fc8c8b75f315931b408d8ebf56aec33ef4168d1b9e54200b1"}, + {file = "rpds_py-0.17.1-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b9412abdf0ba70faa6e2ee6c0cc62a8defb772e78860cef419865917d86c7342"}, + {file = "rpds_py-0.17.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9737bdaa0ad33d34c0efc718741abaafce62fadae72c8b251df9b0c823c63b22"}, + {file = "rpds_py-0.17.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:9f0e4dc0f17dcea4ab9d13ac5c666b6b5337042b4d8f27e01b70fae41dd65c57"}, + {file = "rpds_py-0.17.1-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:1db228102ab9d1ff4c64148c96320d0be7044fa28bd865a9ce628ce98da5973d"}, + {file = "rpds_py-0.17.1-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:d8bbd8e56f3ba25a7d0cf980fc42b34028848a53a0e36c9918550e0280b9d0b6"}, + {file = "rpds_py-0.17.1-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:be22ae34d68544df293152b7e50895ba70d2a833ad9566932d750d3625918b82"}, + {file = "rpds_py-0.17.1-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:bf046179d011e6114daf12a534d874958b039342b347348a78b7cdf0dd9d6041"}, + {file = "rpds_py-0.17.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:1a746a6d49665058a5896000e8d9d2f1a6acba8a03b389c1e4c06e11e0b7f40d"}, + {file = "rpds_py-0.17.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0b8bf5b8db49d8fd40f54772a1dcf262e8be0ad2ab0206b5a2ec109c176c0a4"}, + {file = "rpds_py-0.17.1-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f7f4cb1f173385e8a39c29510dd11a78bf44e360fb75610594973f5ea141028b"}, + {file = "rpds_py-0.17.1-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7fbd70cb8b54fe745301921b0816c08b6d917593429dfc437fd024b5ba713c58"}, + {file = "rpds_py-0.17.1-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9bdf1303df671179eaf2cb41e8515a07fc78d9d00f111eadbe3e14262f59c3d0"}, + {file = "rpds_py-0.17.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fad059a4bd14c45776600d223ec194e77db6c20255578bb5bcdd7c18fd169361"}, + {file = "rpds_py-0.17.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3664d126d3388a887db44c2e293f87d500c4184ec43d5d14d2d2babdb4c64cad"}, + {file = "rpds_py-0.17.1-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = 
"sha256:698ea95a60c8b16b58be9d854c9f993c639f5c214cf9ba782eca53a8789d6b19"}, + {file = "rpds_py-0.17.1-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:c3d2010656999b63e628a3c694f23020322b4178c450dc478558a2b6ef3cb9bb"}, + {file = "rpds_py-0.17.1-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:938eab7323a736533f015e6069a7d53ef2dcc841e4e533b782c2bfb9fb12d84b"}, + {file = "rpds_py-0.17.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:1e626b365293a2142a62b9a614e1f8e331b28f3ca57b9f05ebbf4cf2a0f0bdc5"}, + {file = "rpds_py-0.17.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:380e0df2e9d5d5d339803cfc6d183a5442ad7ab3c63c2a0982e8c824566c5ccc"}, + {file = "rpds_py-0.17.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b760a56e080a826c2e5af09002c1a037382ed21d03134eb6294812dda268c811"}, + {file = "rpds_py-0.17.1-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5576ee2f3a309d2bb403ec292d5958ce03953b0e57a11d224c1f134feaf8c40f"}, + {file = "rpds_py-0.17.1-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1f3c3461ebb4c4f1bbc70b15d20b565759f97a5aaf13af811fcefc892e9197ba"}, + {file = "rpds_py-0.17.1-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:637b802f3f069a64436d432117a7e58fab414b4e27a7e81049817ae94de45d8d"}, + {file = "rpds_py-0.17.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffee088ea9b593cc6160518ba9bd319b5475e5f3e578e4552d63818773c6f56a"}, + {file = "rpds_py-0.17.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3ac732390d529d8469b831949c78085b034bff67f584559340008d0f6041a049"}, + {file = "rpds_py-0.17.1-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:93432e747fb07fa567ad9cc7aaadd6e29710e515aabf939dfbed8046041346c6"}, + {file = "rpds_py-0.17.1-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = 
"sha256:7b7d9ca34542099b4e185b3c2a2b2eda2e318a7dbde0b0d83357a6d4421b5296"}, + {file = "rpds_py-0.17.1-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:0387ce69ba06e43df54e43968090f3626e231e4bc9150e4c3246947567695f68"}, + {file = "rpds_py-0.17.1.tar.gz", hash = "sha256:0210b2668f24c078307260bf88bdac9d6f1093635df5123789bfee4d8d7fc8e7"}, +] + +[[package]] +name = "ruamel-yaml" +version = "0.18.5" +description = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order" +optional = false +python-versions = ">=3.7" +files = [ + {file = "ruamel.yaml-0.18.5-py3-none-any.whl", hash = "sha256:a013ac02f99a69cdd6277d9664689eb1acba07069f912823177c5eced21a6ada"}, + {file = "ruamel.yaml-0.18.5.tar.gz", hash = "sha256:61917e3a35a569c1133a8f772e1226961bf5a1198bea7e23f06a0841dea1ab0e"}, +] + +[package.dependencies] +"ruamel.yaml.clib" = {version = ">=0.2.7", markers = "platform_python_implementation == \"CPython\" and python_version < \"3.13\""} + +[package.extras] +docs = ["mercurial (>5.7)", "ryd"] +jinja2 = ["ruamel.yaml.jinja2 (>=0.2)"] + +[[package]] +name = "ruamel-yaml-clib" +version = "0.2.8" +description = "C version of reader, parser and emitter for ruamel.yaml derived from libyaml" +optional = false +python-versions = ">=3.6" +files = [ + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b42169467c42b692c19cf539c38d4602069d8c1505e97b86387fcf7afb766e1d"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:07238db9cbdf8fc1e9de2489a4f68474e70dffcb32232db7c08fa61ca0c7c462"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:fff3573c2db359f091e1589c3d7c5fc2f86f5bdb6f24252c2d8e539d4e45f412"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux_2_24_aarch64.whl", hash = "sha256:aa2267c6a303eb483de8d02db2871afb5c5fc15618d894300b88958f729ad74f"}, + 
{file = "ruamel.yaml.clib-0.2.8-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:840f0c7f194986a63d2c2465ca63af8ccbbc90ab1c6001b1978f05119b5e7334"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:024cfe1fc7c7f4e1aff4a81e718109e13409767e4f871443cbff3dba3578203d"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-win32.whl", hash = "sha256:c69212f63169ec1cfc9bb44723bf2917cbbd8f6191a00ef3410f5a7fe300722d"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-win_amd64.whl", hash = "sha256:cabddb8d8ead485e255fe80429f833172b4cadf99274db39abc080e068cbcc31"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:bef08cd86169d9eafb3ccb0a39edb11d8e25f3dae2b28f5c52fd997521133069"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:b16420e621d26fdfa949a8b4b47ade8810c56002f5389970db4ddda51dbff248"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:25c515e350e5b739842fc3228d662413ef28f295791af5e5110b543cf0b57d9b"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux_2_24_aarch64.whl", hash = "sha256:1707814f0d9791df063f8c19bb51b0d1278b8e9a2353abbb676c2f685dee6afe"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:46d378daaac94f454b3a0e3d8d78cafd78a026b1d71443f4966c696b48a6d899"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:09b055c05697b38ecacb7ac50bdab2240bfca1a0c4872b0fd309bb07dc9aa3a9"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-win32.whl", hash = "sha256:53a300ed9cea38cf5a2a9b069058137c2ca1ce658a874b79baceb8f892f915a7"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-win_amd64.whl", hash = "sha256:c2a72e9109ea74e511e29032f3b670835f8a59bbdc9ce692c5b4ed91ccf1eedb"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-macosx_10_9_universal2.whl", hash = 
"sha256:ebc06178e8821efc9692ea7544aa5644217358490145629914d8020042c24aa1"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-macosx_13_0_arm64.whl", hash = "sha256:edaef1c1200c4b4cb914583150dcaa3bc30e592e907c01117c08b13a07255ec2"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d176b57452ab5b7028ac47e7b3cf644bcfdc8cacfecf7e71759f7f51a59e5c92"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux_2_24_aarch64.whl", hash = "sha256:1dc67314e7e1086c9fdf2680b7b6c2be1c0d8e3a8279f2e993ca2a7545fecf62"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3213ece08ea033eb159ac52ae052a4899b56ecc124bb80020d9bbceeb50258e9"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:aab7fd643f71d7946f2ee58cc88c9b7bfc97debd71dcc93e03e2d174628e7e2d"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-win32.whl", hash = "sha256:5c365d91c88390c8d0a8545df0b5857172824b1c604e867161e6b3d59a827eaa"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-win_amd64.whl", hash = "sha256:1758ce7d8e1a29d23de54a16ae867abd370f01b5a69e1a3ba75223eaa3ca1a1b"}, + {file = "ruamel.yaml.clib-0.2.8-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a5aa27bad2bb83670b71683aae140a1f52b0857a2deff56ad3f6c13a017a26ed"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c58ecd827313af6864893e7af0a3bb85fd529f862b6adbefe14643947cfe2942"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-macosx_12_0_arm64.whl", hash = "sha256:f481f16baec5290e45aebdc2a5168ebc6d35189ae6fea7a58787613a25f6e875"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-manylinux_2_24_aarch64.whl", hash = "sha256:77159f5d5b5c14f7c34073862a6b7d34944075d9f93e681638f6d753606c6ce6"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7f67a1ee819dc4562d444bbafb135832b0b909f81cc90f7aa00260968c9ca1b3"}, + {file 
= "ruamel.yaml.clib-0.2.8-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:4ecbf9c3e19f9562c7fdd462e8d18dd902a47ca046a2e64dba80699f0b6c09b7"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:87ea5ff66d8064301a154b3933ae406b0863402a799b16e4a1d24d9fbbcbe0d3"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-win32.whl", hash = "sha256:75e1ed13e1f9de23c5607fe6bd1aeaae21e523b32d83bb33918245361e9cc51b"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-win_amd64.whl", hash = "sha256:3f215c5daf6a9d7bbed4a0a4f760f3113b10e82ff4c5c44bec20a68c8014f675"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1b617618914cb00bf5c34d4357c37aa15183fa229b24767259657746c9077615"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:a6a9ffd280b71ad062eae53ac1659ad86a17f59a0fdc7699fd9be40525153337"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-manylinux_2_24_aarch64.whl", hash = "sha256:305889baa4043a09e5b76f8e2a51d4ffba44259f6b4c72dec8ca56207d9c6fe1"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:700e4ebb569e59e16a976857c8798aee258dceac7c7d6b50cab63e080058df91"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:e2b4c44b60eadec492926a7270abb100ef9f72798e18743939bdbf037aab8c28"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e79e5db08739731b0ce4850bed599235d601701d5694c36570a99a0c5ca41a9d"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-win32.whl", hash = "sha256:955eae71ac26c1ab35924203fda6220f84dce57d6d7884f189743e2abe3a9fbe"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-win_amd64.whl", hash = "sha256:56f4252222c067b4ce51ae12cbac231bce32aee1d33fbfc9d17e5b8d6966c312"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:03d1162b6d1df1caa3a4bd27aa51ce17c9afc2046c31b0ad60a0a96ec22f8001"}, + {file = 
"ruamel.yaml.clib-0.2.8-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:bba64af9fa9cebe325a62fa398760f5c7206b215201b0ec825005f1b18b9bccf"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-manylinux_2_24_aarch64.whl", hash = "sha256:a1a45e0bb052edf6a1d3a93baef85319733a888363938e1fc9924cb00c8df24c"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:da09ad1c359a728e112d60116f626cc9f29730ff3e0e7db72b9a2dbc2e4beed5"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:184565012b60405d93838167f425713180b949e9d8dd0bbc7b49f074407c5a8b"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a75879bacf2c987c003368cf14bed0ffe99e8e85acfa6c0bfffc21a090f16880"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-win32.whl", hash = "sha256:84b554931e932c46f94ab306913ad7e11bba988104c5cff26d90d03f68258cd5"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-win_amd64.whl", hash = "sha256:25ac8c08322002b06fa1d49d1646181f0b2c72f5cbc15a85e80b4c30a544bb15"}, + {file = "ruamel.yaml.clib-0.2.8.tar.gz", hash = "sha256:beb2e0404003de9a4cab9753a8805a8fe9320ee6673136ed7f04255fe60bb512"}, ] [[package]] name = "rx" version = "1.6.3" description = "Reactive Extensions (Rx) for Python" -category = "main" optional = false python-versions = "*" files = [ @@ -3230,7 +3519,6 @@ files = [ name = "scp" version = "0.14.5" description = "scp module for paramiko" -category = "main" optional = false python-versions = "*" files = [ @@ -3243,14 +3531,13 @@ paramiko = "*" [[package]] name = "setuptools" -version = "69.0.0" +version = "69.0.3" description = "Easily download, build, install, upgrade, and uninstall Python packages" -category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.0.0-py3-none-any.whl", hash = "sha256:eb03b43f23910c5fd0909cb677ad017cd9531f493d27f8b3f5316ff1fb07390e"}, - {file = "setuptools-69.0.0.tar.gz", hash = 
"sha256:4c65d4f7891e5b046e9146913b87098144de2ca2128fbc10135b8556a6ddd946"}, + {file = "setuptools-69.0.3-py3-none-any.whl", hash = "sha256:385eb4edd9c9d5c17540511303e39a147ce2fc04bc55289c322b9e5904fe2c05"}, + {file = "setuptools-69.0.3.tar.gz", hash = "sha256:be1af57fc409f93647f2e8e4573a142ed38724b8cdd389706a867bb4efcf1e78"}, ] [package.extras] @@ -3262,7 +3549,6 @@ testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jar name = "singledispatch" version = "4.1.0" description = "Backport functools.singledispatch to older Pythons." -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -3278,7 +3564,6 @@ testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", name = "six" version = "1.16.0" description = "Python 2 and 3 compatibility utilities" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -3290,7 +3575,6 @@ files = [ name = "smmap" version = "5.0.1" description = "A pure Python implementation of a sliding window memory map manager" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3298,11 +3582,21 @@ files = [ {file = "smmap-5.0.1.tar.gz", hash = "sha256:dceeb6c0028fdb6734471eb07c0cd2aae706ccaecab45965ee83f11c8d3b1f62"}, ] +[[package]] +name = "sniffio" +version = "1.3.0" +description = "Sniff out which async library your code is running under" +optional = false +python-versions = ">=3.7" +files = [ + {file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"}, + {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"}, +] + [[package]] name = "snowballstemmer" version = "2.2.0" description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." 
-category = "dev" optional = false python-versions = "*" files = [ @@ -3314,7 +3608,6 @@ files = [ name = "social-auth-app-django" version = "5.2.0" description = "Python Social Authentication, Django integration." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3328,14 +3621,13 @@ social-auth-core = ">=4.4.1" [[package]] name = "social-auth-core" -version = "4.5.0" +version = "4.5.1" description = "Python social authentication made simple." -category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "social-auth-core-4.5.0.tar.gz", hash = "sha256:3d4154f45c0bacffe54ccf4361bce7e66cf5f5cd1bb0ebb7507ad09a1b07d9d9"}, - {file = "social_auth_core-4.5.0-py3-none-any.whl", hash = "sha256:f4ae5d8e503a401f319498bcad59fd1f6c473517eeae89c22299250f63c33365"}, + {file = "social-auth-core-4.5.1.tar.gz", hash = "sha256:307a4ba64d4f3ec86e4389163eac1d8b8656ffe5ab2e964aeff043ab00b3a662"}, + {file = "social_auth_core-4.5.1-py3-none-any.whl", hash = "sha256:54d0c598bf6ea0ec12bbcf78bee035c7cd604b5d781d80b7997e9e033c3ac05d"}, ] [package.dependencies] @@ -3357,7 +3649,6 @@ saml = ["python3-saml (>=1.5.0)"] name = "sqlparse" version = "0.4.4" description = "A non-validating SQL parser." 
-category = "main" optional = false python-versions = ">=3.5" files = [ @@ -3374,7 +3665,6 @@ test = ["pytest", "pytest-cov"] name = "stevedore" version = "5.1.0" description = "Manage dynamic plugins for Python applications" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -3385,11 +3675,27 @@ files = [ [package.dependencies] pbr = ">=2.0.0,<2.1.0 || >2.1.0" +[[package]] +name = "structlog" +version = "22.3.0" +description = "Structured Logging for Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "structlog-22.3.0-py3-none-any.whl", hash = "sha256:b403f344f902b220648fa9f286a23c0cc5439a5844d271fec40562dbadbc70ad"}, + {file = "structlog-22.3.0.tar.gz", hash = "sha256:e7509391f215e4afb88b1b80fa3ea074be57a5a17d794bd436a5c949da023333"}, +] + +[package.extras] +dev = ["structlog[docs,tests,typing]"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-mermaid", "twisted"] +tests = ["coverage[toml]", "freezegun (>=0.2.8)", "pretend", "pytest (>=6.0)", "pytest-asyncio (>=0.17)", "simplejson"] +typing = ["mypy", "rich", "twisted"] + [[package]] name = "svgwrite" version = "1.4.3" description = "A Python library to create SVG drawings." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -3401,7 +3707,6 @@ files = [ name = "text-unidecode" version = "1.3" description = "The most basic Text::Unidecode port" -category = "main" optional = false python-versions = "*" files = [ @@ -3413,7 +3718,6 @@ files = [ name = "textfsm" version = "1.1.3" description = "Python module for parsing semi-structured text into python tables." 
-category = "main" optional = false python-versions = "*" files = [ @@ -3429,7 +3733,6 @@ six = "*" name = "toml" version = "0.10.2" description = "Python Library for Tom's Obvious, Minimal Language" -category = "dev" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -3441,7 +3744,6 @@ files = [ name = "tomli" version = "2.0.1" description = "A lil' TOML parser" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3453,7 +3755,6 @@ files = [ name = "tomlkit" version = "0.12.3" description = "Style preserving TOML library" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3465,7 +3766,6 @@ files = [ name = "transitions" version = "0.9.0" description = "A lightweight, object-oriented Python state machine implementation with many extensions." -category = "main" optional = false python-versions = "*" files = [ @@ -3484,7 +3784,6 @@ test = ["pytest"] name = "ttp" version = "0.9.5" description = "Template Text Parser" -category = "main" optional = false python-versions = ">=2.7,<4.0" files = [ @@ -3498,14 +3797,13 @@ full = ["cerberus (>=1.3.0,<1.4.0)", "deepdiff (>=5.8.0,<5.9.0)", "jinja2 (>=3.0 [[package]] name = "ttp-templates" -version = "0.3.5" +version = "0.3.6" description = "Template Text Parser Templates collections" -category = "main" optional = false python-versions = ">=3.6,<4.0" files = [ - {file = "ttp_templates-0.3.5-py3-none-any.whl", hash = "sha256:4985a68640468127a0e31021672039cd88a8b9c3dd9289cad67839209cddaf30"}, - {file = "ttp_templates-0.3.5.tar.gz", hash = "sha256:e59870d4f65bd4aaf89178dc9065a7db8b80a23d5d79b5d6ffd041312d5ec5a6"}, + {file = "ttp_templates-0.3.6-py3-none-any.whl", hash = "sha256:a126a70b23c428b541f458400eac98582894937ef3825c423d08d4384b4867ae"}, + {file = "ttp_templates-0.3.6.tar.gz", hash = "sha256:f05ac74e9ed75726fab347a144493ca7ffcd3ef298bc54fc09e2fce83c42916d"}, ] [package.dependencies] @@ -3516,33 +3814,30 @@ docs = ["mkdocs (==1.2.4)", 
"mkdocs-material (==7.2.2)", "mkdocs-material-extens [[package]] name = "typing-extensions" -version = "4.8.0" +version = "4.9.0" description = "Backported and Experimental Type Hints for Python 3.8+" -category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.8.0-py3-none-any.whl", hash = "sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0"}, - {file = "typing_extensions-4.8.0.tar.gz", hash = "sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef"}, + {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, + {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, ] [[package]] name = "tzdata" -version = "2023.3" +version = "2023.4" description = "Provider of IANA time zone data" -category = "main" optional = false python-versions = ">=2" files = [ - {file = "tzdata-2023.3-py2.py3-none-any.whl", hash = "sha256:7e65763eef3120314099b6939b5546db7adce1e7d6f2e179e3df563c70511eda"}, - {file = "tzdata-2023.3.tar.gz", hash = "sha256:11ef1e08e54acb0d4f95bdb1be05da659673de4acbd21bf9c69e94cc5e907a3a"}, + {file = "tzdata-2023.4-py2.py3-none-any.whl", hash = "sha256:aa3ace4329eeacda5b7beb7ea08ece826c28d761cda36e747cfbf97996d39bf3"}, + {file = "tzdata-2023.4.tar.gz", hash = "sha256:dd54c94f294765522c77399649b4fefd95522479a664a0cec87f41bebc6148c9"}, ] [[package]] name = "uritemplate" version = "4.1.1" description = "Implementation of RFC 6570 URI Templates" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -3552,26 +3847,24 @@ files = [ [[package]] name = "urllib3" -version = "2.1.0" +version = "1.26.18" description = "HTTP library with thread-safe connection pooling, file post, and more." 
-category = "main" optional = false -python-versions = ">=3.8" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ - {file = "urllib3-2.1.0-py3-none-any.whl", hash = "sha256:55901e917a5896a349ff771be919f8bd99aff50b79fe58fec595eb37bbc56bb3"}, - {file = "urllib3-2.1.0.tar.gz", hash = "sha256:df7aa8afb0148fa78488e7899b2c59b5f4ffcfa82e6c54ccb9dd37c1d7b52d54"}, + {file = "urllib3-1.26.18-py2.py3-none-any.whl", hash = "sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07"}, + {file = "urllib3-1.26.18.tar.gz", hash = "sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0"}, ] [package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] -socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] -zstd = ["zstandard (>=0.18.0)"] +brotli = ["brotli (==1.0.9)", "brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] +secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] +socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] name = "vine" version = "5.1.0" description = "Python promises." 
-category = "main" optional = false python-versions = ">=3.6" files = [ @@ -3583,7 +3876,6 @@ files = [ name = "watchdog" version = "3.0.0" description = "Filesystem events monitoring" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3621,21 +3913,19 @@ watchmedo = ["PyYAML (>=3.10)"] [[package]] name = "wcwidth" -version = "0.2.10" +version = "0.2.13" description = "Measures the displayed width of unicode strings in a terminal" -category = "main" optional = false python-versions = "*" files = [ - {file = "wcwidth-0.2.10-py2.py3-none-any.whl", hash = "sha256:aec5179002dd0f0d40c456026e74a729661c9d468e1ed64405e3a6c2176ca36f"}, - {file = "wcwidth-0.2.10.tar.gz", hash = "sha256:390c7454101092a6a5e43baad8f83de615463af459201709556b6e4b1c861f97"}, + {file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"}, + {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"}, ] [[package]] name = "wrapt" version = "1.16.0" description = "Module for decorators, wrappers and monkey patching." -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -3715,7 +4005,6 @@ files = [ name = "yamllint" version = "1.33.0" description = "A linter for YAML files." -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -3734,7 +4023,6 @@ dev = ["doc8", "flake8", "flake8-import-order", "rstcheck[sphinx]", "sphinx"] name = "yamlordereddictloader" version = "0.4.2" description = "YAML loader and dumper for PyYAML allowing to keep keys order." 
-category = "main" optional = false python-versions = "*" files = [ @@ -3749,7 +4037,6 @@ pyyaml = "*" name = "zipp" version = "3.17.0" description = "Backport of pathlib-compatible object wrapper for zip files" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -3764,4 +4051,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = ">=3.8,<3.12" -content-hash = "3eeabf46ff196a3c23659ada1fe5d50b0bd1a788f9c858f3a475d39b7e12bd80" +content-hash = "e94baa051993eec15b367e1a37faa54fb1099c3a96bd29d3841e2123d3414eed" From 82c26109c75cc974f82578038a365493d7b47981 Mon Sep 17 00:00:00 2001 From: Jeff Kala Date: Fri, 19 Jan 2024 17:31:30 -0600 Subject: [PATCH 008/225] initial cleanup of new job and nornir structures --- development/nautobot_config.py | 5 +- nautobot_device_onboarding/constants.py | 5 + .../diffsync/adapters/onboarding_adapters.py | 9 +- .../diffsync/models/onboarding_models.py | 7 +- nautobot_device_onboarding/jobs.py | 222 +- .../nornir_plays/command_getter.py | 8 + .../nornir_plays/empty_inventory.py | 13 + .../utils/inventory_creator.py | 87 + poetry.lock | 1791 +++++++++++------ 9 files changed, 1328 insertions(+), 819 deletions(-) create mode 100755 nautobot_device_onboarding/nornir_plays/command_getter.py create mode 100755 nautobot_device_onboarding/nornir_plays/empty_inventory.py create mode 100755 nautobot_device_onboarding/utils/inventory_creator.py diff --git a/development/nautobot_config.py b/development/nautobot_config.py index 4b51769f..aca04327 100644 --- a/development/nautobot_config.py +++ b/development/nautobot_config.py @@ -138,9 +138,8 @@ # Apps configuration settings. These settings are used by various Apps that the user may have installed. # Each key in the dictionary is the name of an installed App and its value is a dictionary of settings. 
PLUGINS_CONFIG = { - 'nautobot_device_onboarding': { - }, -"nautobot_ssot": { + "nautobot_device_onboarding": {}, + "nautobot_ssot": { "hide_example_jobs": is_truthy(os.getenv("NAUTOBOT_SSOT_HIDE_EXAMPLE_JOBS")), }, "nautobot_plugin_nornir": { diff --git a/nautobot_device_onboarding/constants.py b/nautobot_device_onboarding/constants.py index caccc432..ea2332c2 100644 --- a/nautobot_device_onboarding/constants.py +++ b/nautobot_device_onboarding/constants.py @@ -8,3 +8,8 @@ "juniper_junos": "junos", "cisco_xr": "iosxr", } + +PLATFORM_COMMAND_MAP = { + "cisco_ios": ["show version", "show inventory", "show interfaces"], + "cisco_nxos": ["show version", "show inventory", "show interface"], +} diff --git a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py index d0bae98a..c5401ae8 100644 --- a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py @@ -23,7 +23,7 @@ "manufacturer": "Cisco", "platform": "IOS", "network_driver": "cisco_ios", - "prefix": "10.0.0.0", # this is the network field on the Prefix model + "prefix": "10.0.0.0", # this is the network field on the Prefix model "prefix_length": 8, "mask_length": 24, } @@ -97,8 +97,11 @@ def load_devices(self): command_getter_job = JobModel.objects.get(name="Command Getter for Device Onboarding").job_task result = command_getter_job.s() - result.apply_async(args=self.job.job_result.task_args, kwargs=self.job.job_result.task_kwargs, **self.job.job_result.celery_kwargs) - + result.apply_async( + args=self.job.job_result.task_args, + kwargs=self.job.job_result.task_kwargs, + **self.job.job_result.celery_kwargs, + ) for ip_address in mock_data: if self.job.debug: diff --git a/nautobot_device_onboarding/diffsync/models/onboarding_models.py b/nautobot_device_onboarding/diffsync/models/onboarding_models.py index 162d36bc..947336a9 100644 --- 
a/nautobot_device_onboarding/diffsync/models/onboarding_models.py +++ b/nautobot_device_onboarding/diffsync/models/onboarding_models.py @@ -3,8 +3,7 @@ from typing import List, Optional import netaddr -from nautobot.dcim.models import (Device, DeviceType, Interface, Manufacturer, - Platform) +from nautobot.dcim.models import Device, DeviceType, Interface, Manufacturer, Platform from nautobot.ipam.models import IPAddress, Prefix from nautobot_ssot.contrib import NautobotModel @@ -93,12 +92,12 @@ class OnboardingIPAddress(NautobotModel): _modelname = "ip_address" _model = IPAddress _identifiers = ( - "parent__namespace__name", + "parent__namespace__name", "parent__network", "parent__prefix_length", "host", "mask_length", - ) + ) parent__namespace__name: str parent__network: str diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py index a5df60af..a10eb5eb 100644 --- a/nautobot_device_onboarding/jobs.py +++ b/nautobot_device_onboarding/jobs.py @@ -1,51 +1,42 @@ """Device Onboarding Jobs.""" +from diffsync.enum import DiffSyncFlags from django.conf import settings from django.templatetags.static import static -from nautobot.apps.jobs import Job, ObjectVar, IntegerVar, StringVar, BooleanVar +from nautobot.apps.jobs import BooleanVar, IntegerVar, Job, ObjectVar, StringVar from nautobot.core.celery import register_jobs -from nautobot.dcim.models import Location, DeviceType, Platform -from nautobot.ipam.models import Namespace -from nautobot.extras.models import Role, SecretsGroup, SecretsGroupAssociation, Status +from nautobot.dcim.models import DeviceType, Location, Platform from nautobot.extras.choices import SecretsGroupAccessTypeChoices, SecretsGroupSecretTypeChoices - +from nautobot.extras.models import Role, SecretsGroup, SecretsGroupAssociation, Status +from nautobot.ipam.models import Namespace +from nautobot_device_onboarding.diffsync.adapters.network_importer_adapters import ( + NetworkImporterNautobotAdapter, + 
NetworkImporterNetworkAdapter, +) +from nautobot_device_onboarding.diffsync.adapters.onboarding_adapters import ( + OnboardingNautobotAdapter, + OnboardingNetworkAdapter, +) from nautobot_device_onboarding.exceptions import OnboardException from nautobot_device_onboarding.helpers import onboarding_task_fqdn_to_ip from nautobot_device_onboarding.netdev_keeper import NetdevKeeper -from nautobot_device_onboarding.diffsync.adapters.onboarding_adapters import OnboardingNautobotAdapter, OnboardingNetworkAdapter -from nautobot_device_onboarding.diffsync.adapters.network_importer_adapters import NetworkImporterNautobotAdapter, NetworkImporterNetworkAdapter +from nautobot_device_onboarding.nornir_plays.command_getter import netmiko_send_commands +from nautobot_device_onboarding.nornir_plays.empty_inventory import EmptyInventory +from nautobot_device_onboarding.utils.inventory_creator import _set_inventory from nautobot_ssot.jobs.base import DataSource -from diffsync.enum import DiffSyncFlags - -from django.conf import settings -from nautobot.apps.jobs import Job, ObjectVar, IntegerVar, StringVar, BooleanVar -from nautobot.core.celery import register_jobs -from nautobot.dcim.models import Location, DeviceType, Platform -from nautobot.extras.models import Role, SecretsGroup, SecretsGroupAssociation -from nautobot.extras.choices import SecretsGroupAccessTypeChoices, SecretsGroupSecretTypeChoices -from netmiko import SSHDetect from nornir import InitNornir - -from nornir_netmiko.tasks import netmiko_send_command +from nornir.core.inventory import ConnectionOptions, Defaults, Groups, Host, Hosts, Inventory from nornir.core.plugins.inventory import InventoryPluginRegister from nornir.core.task import Result, Task -from nornir.core.inventory import ( - Inventory, - ConnectionOptions, - Defaults, - Groups, - Host, - Hosts, -) +from nornir_netmiko.tasks import netmiko_send_command -from nautobot_device_onboarding.exceptions import OnboardException -from 
nautobot_device_onboarding.helpers import onboarding_task_fqdn_to_ip -from nautobot_device_onboarding.netdev_keeper import NetdevKeeper +InventoryPluginRegister.register("empty-inventory", EmptyInventory) PLUGIN_SETTINGS = settings.PLUGINS_CONFIG["nautobot_device_onboarding"] name = "Device Onboarding/Network Importer" + class OnboardingTask(Job): # pylint: disable=too-many-instance-attributes """Nautobot Job for onboarding a new device.""" @@ -226,23 +217,23 @@ class Meta: name = "Sync Devices" description = "Synchronize basic device information into Nautobot" - debug = BooleanVar( default=False, description="Enable for more verbose logging.",) + debug = BooleanVar( + default=False, + description="Enable for more verbose logging.", + ) location = ObjectVar( model=Location, query_params={"content_type": "dcim.device"}, description="Assigned Location for the onboarded device(s)", ) - namespace = ObjectVar( - model=Namespace, - description="Namespace ip addresses belong to." - ) + namespace = ObjectVar(model=Namespace, description="Namespace ip addresses belong to.") ip_addresses = StringVar( description="IP Address of the device to onboard, specify in a comma separated list for multiple devices.", label="IPv4 Addresses", ) management_only_interface = BooleanVar( - default=False, + default=False, label="Set Management Only", description="If True, interfaces that are created or updated will be set to management only. If False, the interface will be set to not be management only.", ) @@ -274,7 +265,7 @@ class Meta: required=False, description="Device platform. 
Define ONLY to override auto-recognition of platform.", ) - + def load_source_adapter(self): """Load onboarding network adapter.""" self.source_adapter = OnboardingNetworkAdapter(job=self, sync=self.sync) @@ -304,6 +295,7 @@ def run(self, dryrun, memory_profiling, *args, **kwargs): # pylint:disable=argu self.platform = kwargs["platform"] super().run(dryrun, memory_profiling, *args, **kwargs) + class SSOTNetworkImporter(DataSource): """Job syncing extended device attributes into Nautobot.""" @@ -313,21 +305,15 @@ class Meta: """Metadata about this Job.""" name = "Sync Network Data" - description = "Synchronize extended device attribute information into Nautobot; "\ - "including Interfaces, IPAddresses, Prefixes, Vlans and Cables." - - -PLATFORM_COMMAND_MAP = { - "cisco_ios": ["show version", "show inventory", "show interfaces"], - "cisco_nxos": ["show version", "show inventory", "show interface"], - } + description = ( + "Synchronize extended device attribute information into Nautobot; " + "including Interfaces, IPAddresses, Prefixes, Vlans and Cables." 
+ ) -def netmiko_send_commands(task: Task): - platform = task.host.platform or 'default' - for command in PLATFORM_COMMAND_MAP.get(platform): - task.run(task=netmiko_send_command, command_string=command, use_textfsm=True) class CommandGetterDO(Job): + """Simple Job to Execute Show Command.""" + class Meta: # pylint: disable=too-few-public-methods """Meta object boilerplate for onboarding.""" @@ -336,21 +322,8 @@ class Meta: # pylint: disable=too-few-public-methods has_sensitive_variables = False hidden = False - class EmptyInventory: - """Creates an empty Nornir Inventory to be populated later.""" - def __init__(self, *args, **kwargs): - pass - - def load(self) -> Inventory: - """Create a default empty inventory.""" - hosts = Hosts() - defaults = Defaults(data={}) - groups = Groups() - return Inventory(hosts=hosts, groups=groups, defaults=defaults) - - InventoryPluginRegister.register("empty-inventory", EmptyInventory) - def __init__(self, *args, **kwargs): + """Initialize Command Getter Job.""" self.username = None self.password = None self.secret = None @@ -361,61 +334,14 @@ def __init__(self, *args, **kwargs): self.timeout = None super().__init__(*args, **kwargs) - def _parse_credentials(self, credentials): - """Parse and return dictionary of credentials.""" - if credentials: - self.logger.info("Attempting to parse credentials from selected SecretGroup") - try: - self.username = credentials.get_secret_value( - access_type=SecretsGroupAccessTypeChoices.TYPE_GENERIC, - secret_type=SecretsGroupSecretTypeChoices.TYPE_USERNAME, - ) - self.password = credentials.get_secret_value( - access_type=SecretsGroupAccessTypeChoices.TYPE_GENERIC, - secret_type=SecretsGroupSecretTypeChoices.TYPE_PASSWORD, - ) - try: - self.secret = credentials.get_secret_value( - access_type=SecretsGroupAccessTypeChoices.TYPE_GENERIC, - secret_type=SecretsGroupSecretTypeChoices.TYPE_SECRET, - ) - except Exception as e: - self.secret = None - except Exception as err: - 
self.logger.exception(f"Unable to use SecretsGroup selected, ensure Access Type is set to Generic & at minimum Username & Password types are set.", {e} - ) - raise OnboardException("fail-credentials - Unable to parse selected credentials.") from err - - else: - self.logger.info("Using napalm credentials configured in nautobot_config.py") - self.username = settings.NAPALM_USERNAME - self.password = settings.NAPALM_PASSWORD - self.secret = settings.NAPALM_ARGS.get("secret", None) - - def guess_netmiko_device_type(self, hostname, username, password): - """Guess the device type of host, based on Netmiko.""" - guessed_device_type = None - - netmiko_optional_args = {} - - remote_device = { - "device_type": "autodetect", - "host": hostname, - "username": username, - "password": password, - **netmiko_optional_args, - } - - try: - guesser = SSHDetect(**remote_device) - guessed_device_type = guesser.autodetect() - - except Exception as err: - print(err) - return guessed_device_type - def run(self): - mock_job_data = {"ip4address": "174.51.52.76,10.1.1.1", "platform": "cisco_ios", "secrets_group": SecretsGroup.objects.get(name="Cisco Devices"), "port": 8922,"timeout": 30} + mock_job_data = { + "ip4address": "174.51.52.76,10.1.1.1", + "platform": "cisco_ios", + "secrets_group": SecretsGroup.objects.get(name="Cisco Devices"), + "port": 8922, + "timeout": 30, + } """Process onboarding task from ssot-ni job.""" self.ip4address = mock_job_data["ip4address"] @@ -427,50 +353,44 @@ def run(self): # Initiate Nornir instance with empty inventory try: with InitNornir(inventory={"plugin": "empty-inventory"}) as nr: - - # Parse credentials from SecretsGroup - self._parse_credentials(mock_job_data["secrets_group"]) - - # Build Nornir Inventory ip_address = mock_job_data["ip4address"].split(",") self.platform = mock_job_data.get("platform", None) - for h in ip_address: - if not self.platform: - self.platform = self.guess_netmiko_device_type(h, self.username, self.password) - - host = Host( 
- name=h, - hostname=h, - port=mock_job_data["port"], - username=self.username, - password=self.password, - platform=self.platform, - connection_options={ - "netmiko": ConnectionOptions( - hostname=h, - port=mock_job_data["port"], - username=self.username, - password=self.password, - platform=self.platform, - ) - }, - ) - nr.inventory.hosts.update({h: host}) - self.logger.info(nr.inventory.hosts) + inventory_constructed = _set_inventory(ip_address, self.platform, self.port, self.secrets_group) + nr.inventory.hosts.update(inventory_constructed) + self.logger.info(nr.inventory.hosts) - self.logger.info(f"Inventory built for {len(ip_address)} devices") + self.logger.info("Inventory built for %s devices", len(ip_address)) results = nr.run(task=netmiko_send_commands) - + for agg_result in results: for r in results[agg_result]: - self.logger.info(f"host: {r.host}") - self.logger.info(f"result: {r.result}") - + self.logger.info("host: %s", r.host) + self.logger.info("result: %s", r.result) + except Exception as err: - self.logger.info(f"Error: {err}") + self.logger.info("Error: %s", err) return err - return {"addtional_data": "working"} + # return { + # "10.1.1.8": { + # "command_output_results": True, + # "hostname": "demo-cisco-xe", + # "serial_number": "9ABUXU580QS", + # "device_type": "CSR1000V2", + # "mgmt_ip_address": "10.1.1.8", + # "mgmt_interface": "GigabitEthernet1", + # "manufacturer": "Cisco", + # "platform": "IOS", + # "network_driver": "cisco_ios", + # "prefix": "10.0.0.0", # this is the network field on the Prefix model + # "prefix_length": 8, + # "mask_length": 24, + # }, + # "10.1.1.9": { + # "command_output_results": False, + # } + # } + return {"additonal_data": "This worked"} jobs = [OnboardingTask, SSOTDeviceOnboarding, CommandGetterDO] diff --git a/nautobot_device_onboarding/nornir_plays/command_getter.py b/nautobot_device_onboarding/nornir_plays/command_getter.py new file mode 100755 index 00000000..083e861a --- /dev/null +++ 
b/nautobot_device_onboarding/nornir_plays/command_getter.py @@ -0,0 +1,8 @@ +from nautobot_device_onboarding.constants import PLATFORM_COMMAND_MAP +from nornir.core.task import Task +from nornir_netmiko.tasks import netmiko_send_command + + +def netmiko_send_commands(task: Task): + for command in PLATFORM_COMMAND_MAP.get(task.host.platform, "default"): + task.run(task=netmiko_send_command, command_string=command, use_textfsm=True) diff --git a/nautobot_device_onboarding/nornir_plays/empty_inventory.py b/nautobot_device_onboarding/nornir_plays/empty_inventory.py new file mode 100755 index 00000000..8d13a0ae --- /dev/null +++ b/nautobot_device_onboarding/nornir_plays/empty_inventory.py @@ -0,0 +1,13 @@ +"""Empty Nornir Inventory Plugin.""" +from nornir.core.inventory import Defaults, Groups, Hosts, Inventory + + +class EmptyInventory: + """Creates an empty Nornir Inventory to be populated later.""" + + def load(self) -> Inventory: + """Create a default empty inventory.""" + hosts = Hosts() + defaults = Defaults(data={}) + groups = Groups() + return Inventory(hosts=hosts, groups=groups, defaults=defaults) diff --git a/nautobot_device_onboarding/utils/inventory_creator.py b/nautobot_device_onboarding/utils/inventory_creator.py new file mode 100755 index 00000000..3cf61ebb --- /dev/null +++ b/nautobot_device_onboarding/utils/inventory_creator.py @@ -0,0 +1,87 @@ +"""Inventory Creator and Helpers.""" + +from django.conf import settings +from nautobot.extras.choices import SecretsGroupAccessTypeChoices, SecretsGroupSecretTypeChoices +from nautobot_device_onboarding.exceptions import OnboardException +from netmiko import SSHDetect +from nornir.core.inventory import ConnectionOptions, Host + + +def _parse_credentials(credentials): + """Parse and return dictionary of credentials.""" + if credentials: + try: + username = credentials.get_secret_value( + access_type=SecretsGroupAccessTypeChoices.TYPE_GENERIC, + secret_type=SecretsGroupSecretTypeChoices.TYPE_USERNAME, + ) + 
password = credentials.get_secret_value( + access_type=SecretsGroupAccessTypeChoices.TYPE_GENERIC, + secret_type=SecretsGroupSecretTypeChoices.TYPE_PASSWORD, + ) + try: + secret = credentials.get_secret_value( + access_type=SecretsGroupAccessTypeChoices.TYPE_GENERIC, + secret_type=SecretsGroupSecretTypeChoices.TYPE_SECRET, + ) + except: + secret = None + except Exception as err: + raise OnboardException("fail-credentials - Unable to parse selected credentials.") from err + else: + username = settings.NAPALM_USERNAME + password = settings.NAPALM_PASSWORD + secret = settings.NAPALM_ARGS.get("secret", None) + return (username, password, secret) + + +def guess_netmiko_device_type(hostname, username, password): + """Guess the device type of host, based on Netmiko.""" + guessed_device_type = None + + netmiko_optional_args = {} + + remote_device = { + "device_type": "autodetect", + "host": hostname, + "username": username, + "password": password, + **netmiko_optional_args, + } + + try: + guesser = SSHDetect(**remote_device) + guessed_device_type = guesser.autodetect() + + except Exception as err: + print(err) + return guessed_device_type + + +def _set_inventory(ips, platform, port, secrets_group): + """Construct Nornir Inventory.""" + inv = {} + username, password, secret = _parse_credentials(secrets_group) + for host_ip in ips: + if not platform: + platform = guess_netmiko_device_type(host_ip, username, password) + + host = Host( + name=host_ip, + hostname=host_ip, + port=port, + username=username, + password=password, + platform=platform, + connection_options={ + "netmiko": ConnectionOptions( + hostname=host_ip, + port=port, + username=username, + password=password, + platform=platform, + ) + }, + ) + inv.update({host_ip: host}) + return inv diff --git a/poetry.lock b/poetry.lock index f91e0c4e..5848877e 100644 --- a/poetry.lock +++ b/poetry.lock @@ -27,6 +27,29 @@ files = [ {file = "aniso8601-7.0.0.tar.gz", hash = 
"sha256:513d2b6637b7853806ae79ffaca6f3e8754bdd547048f5ccc1420aec4b714f1e"}, ] +[[package]] +name = "anyio" +version = "4.2.0" +description = "High level compatibility layer for multiple asynchronous event loop implementations" +category = "main" +optional = false +python-versions = ">=3.8" +files = [ + {file = "anyio-4.2.0-py3-none-any.whl", hash = "sha256:745843b39e829e108e518c489b31dc757de7d2131d53fac32bd8df268227bfee"}, + {file = "anyio-4.2.0.tar.gz", hash = "sha256:e1875bb4b4e2de1669f4bc7869b6d3f54231cdced71605e6e64c9be77e3be50f"}, +] + +[package.dependencies] +exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} +idna = ">=2.8" +sniffio = ">=1.1" +typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""} + +[package.extras] +doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] +trio = ["trio (>=0.23)"] + [[package]] name = "asgiref" version = "3.7.2" @@ -79,22 +102,23 @@ files = [ [[package]] name = "attrs" -version = "23.1.0" +version = "23.2.0" description = "Classes Without Boilerplate" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "attrs-23.1.0-py3-none-any.whl", hash = "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04"}, - {file = "attrs-23.1.0.tar.gz", hash = "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015"}, + {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, + {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, ] [package.extras] cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] -dev = ["attrs[docs,tests]", "pre-commit"] +dev = ["attrs[tests]", "pre-commit"] docs = 
["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] tests = ["attrs[tests-no-zope]", "zope-interface"] -tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] +tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] [[package]] name = "backports-zoneinfo" @@ -130,19 +154,19 @@ tzdata = ["tzdata"] [[package]] name = "bandit" -version = "1.7.5" +version = "1.7.6" description = "Security oriented static analyser for python code." category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "bandit-1.7.5-py3-none-any.whl", hash = "sha256:75665181dc1e0096369112541a056c59d1c5f66f9bb74a8d686c3c362b83f549"}, - {file = "bandit-1.7.5.tar.gz", hash = "sha256:bdfc739baa03b880c2d15d0431b31c658ffc348e907fe197e54e0389dd59e11e"}, + {file = "bandit-1.7.6-py3-none-any.whl", hash = "sha256:36da17c67fc87579a5d20c323c8d0b1643a890a2b93f00b3d1229966624694ff"}, + {file = "bandit-1.7.6.tar.gz", hash = "sha256:72ce7bc9741374d96fb2f1c9a8960829885f1243ffde743de70a19cee353e8f3"}, ] [package.dependencies] colorama = {version = ">=0.3.9", markers = "platform_system == \"Windows\""} -GitPython = ">=1.0.1" +GitPython = ">=3.1.30" PyYAML = ">=5.3.1" rich = "*" stevedore = ">=1.20.0" @@ -154,33 +178,39 @@ yaml = ["PyYAML"] [[package]] name = "bcrypt" -version = "4.0.1" +version = "4.1.2" description = "Modern password hashing for your software and your servers" category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "bcrypt-4.0.1-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:b1023030aec778185a6c16cf70f359cbb6e0c289fd564a7cfa29e727a1c38f8f"}, - {file = 
"bcrypt-4.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:08d2947c490093a11416df18043c27abe3921558d2c03e2076ccb28a116cb6d0"}, - {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0eaa47d4661c326bfc9d08d16debbc4edf78778e6aaba29c1bc7ce67214d4410"}, - {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ae88eca3024bb34bb3430f964beab71226e761f51b912de5133470b649d82344"}, - {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:a522427293d77e1c29e303fc282e2d71864579527a04ddcfda6d4f8396c6c36a"}, - {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:fbdaec13c5105f0c4e5c52614d04f0bca5f5af007910daa8b6b12095edaa67b3"}, - {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:ca3204d00d3cb2dfed07f2d74a25f12fc12f73e606fcaa6975d1f7ae69cacbb2"}, - {file = "bcrypt-4.0.1-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:089098effa1bc35dc055366740a067a2fc76987e8ec75349eb9484061c54f535"}, - {file = "bcrypt-4.0.1-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:e9a51bbfe7e9802b5f3508687758b564069ba937748ad7b9e890086290d2f79e"}, - {file = "bcrypt-4.0.1-cp36-abi3-win32.whl", hash = "sha256:2caffdae059e06ac23fce178d31b4a702f2a3264c20bfb5ff541b338194d8fab"}, - {file = "bcrypt-4.0.1-cp36-abi3-win_amd64.whl", hash = "sha256:8a68f4341daf7522fe8d73874de8906f3a339048ba406be6ddc1b3ccb16fc0d9"}, - {file = "bcrypt-4.0.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf4fa8b2ca74381bb5442c089350f09a3f17797829d958fad058d6e44d9eb83c"}, - {file = "bcrypt-4.0.1-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:67a97e1c405b24f19d08890e7ae0c4f7ce1e56a712a016746c8b2d7732d65d4b"}, - {file = "bcrypt-4.0.1-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b3b85202d95dd568efcb35b53936c5e3b3600c7cdcc6115ba461df3a8e89f38d"}, - {file = 
"bcrypt-4.0.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cbb03eec97496166b704ed663a53680ab57c5084b2fc98ef23291987b525cb7d"}, - {file = "bcrypt-4.0.1-pp38-pypy38_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:5ad4d32a28b80c5fa6671ccfb43676e8c1cc232887759d1cd7b6f56ea4355215"}, - {file = "bcrypt-4.0.1-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b57adba8a1444faf784394de3436233728a1ecaeb6e07e8c22c8848f179b893c"}, - {file = "bcrypt-4.0.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:705b2cea8a9ed3d55b4491887ceadb0106acf7c6387699fca771af56b1cdeeda"}, - {file = "bcrypt-4.0.1-pp39-pypy39_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:2b3ac11cf45161628f1f3733263e63194f22664bf4d0c0f3ab34099c02134665"}, - {file = "bcrypt-4.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:3100851841186c25f127731b9fa11909ab7b1df6fc4b9f8353f4f1fd952fbf71"}, - {file = "bcrypt-4.0.1.tar.gz", hash = "sha256:27d375903ac8261cfe4047f6709d16f7d18d39b1ec92aaf72af989552a650ebd"}, + {file = "bcrypt-4.1.2-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:ac621c093edb28200728a9cca214d7e838529e557027ef0581685909acd28b5e"}, + {file = "bcrypt-4.1.2-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea505c97a5c465ab8c3ba75c0805a102ce526695cd6818c6de3b1a38f6f60da1"}, + {file = "bcrypt-4.1.2-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:57fa9442758da926ed33a91644649d3e340a71e2d0a5a8de064fb621fd5a3326"}, + {file = "bcrypt-4.1.2-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:eb3bd3321517916696233b5e0c67fd7d6281f0ef48e66812db35fc963a422a1c"}, + {file = "bcrypt-4.1.2-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:6cad43d8c63f34b26aef462b6f5e44fdcf9860b723d2453b5d391258c4c8e966"}, + {file = "bcrypt-4.1.2-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:44290ccc827d3a24604f2c8bcd00d0da349e336e6503656cb8192133e27335e2"}, + {file = 
"bcrypt-4.1.2-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:732b3920a08eacf12f93e6b04ea276c489f1c8fb49344f564cca2adb663b3e4c"}, + {file = "bcrypt-4.1.2-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:1c28973decf4e0e69cee78c68e30a523be441972c826703bb93099868a8ff5b5"}, + {file = "bcrypt-4.1.2-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b8df79979c5bae07f1db22dcc49cc5bccf08a0380ca5c6f391cbb5790355c0b0"}, + {file = "bcrypt-4.1.2-cp37-abi3-win32.whl", hash = "sha256:fbe188b878313d01b7718390f31528be4010fed1faa798c5a1d0469c9c48c369"}, + {file = "bcrypt-4.1.2-cp37-abi3-win_amd64.whl", hash = "sha256:9800ae5bd5077b13725e2e3934aa3c9c37e49d3ea3d06318010aa40f54c63551"}, + {file = "bcrypt-4.1.2-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:71b8be82bc46cedd61a9f4ccb6c1a493211d031415a34adde3669ee1b0afbb63"}, + {file = "bcrypt-4.1.2-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68e3c6642077b0c8092580c819c1684161262b2e30c4f45deb000c38947bf483"}, + {file = "bcrypt-4.1.2-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:387e7e1af9a4dd636b9505a465032f2f5cb8e61ba1120e79a0e1cd0b512f3dfc"}, + {file = "bcrypt-4.1.2-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:f70d9c61f9c4ca7d57f3bfe88a5ccf62546ffbadf3681bb1e268d9d2e41c91a7"}, + {file = "bcrypt-4.1.2-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:2a298db2a8ab20056120b45e86c00a0a5eb50ec4075b6142db35f593b97cb3fb"}, + {file = "bcrypt-4.1.2-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:ba55e40de38a24e2d78d34c2d36d6e864f93e0d79d0b6ce915e4335aa81d01b1"}, + {file = "bcrypt-4.1.2-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:3566a88234e8de2ccae31968127b0ecccbb4cddb629da744165db72b58d88ca4"}, + {file = "bcrypt-4.1.2-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:b90e216dc36864ae7132cb151ffe95155a37a14e0de3a8f64b49655dd959ff9c"}, + {file = "bcrypt-4.1.2-cp39-abi3-musllinux_1_2_x86_64.whl", hash = 
"sha256:69057b9fc5093ea1ab00dd24ede891f3e5e65bee040395fb1e66ee196f9c9b4a"}, + {file = "bcrypt-4.1.2-cp39-abi3-win32.whl", hash = "sha256:02d9ef8915f72dd6daaef40e0baeef8a017ce624369f09754baf32bb32dba25f"}, + {file = "bcrypt-4.1.2-cp39-abi3-win_amd64.whl", hash = "sha256:be3ab1071662f6065899fe08428e45c16aa36e28bc42921c4901a191fda6ee42"}, + {file = "bcrypt-4.1.2-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:d75fc8cd0ba23f97bae88a6ec04e9e5351ff3c6ad06f38fe32ba50cbd0d11946"}, + {file = "bcrypt-4.1.2-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:a97e07e83e3262599434816f631cc4c7ca2aa8e9c072c1b1a7fec2ae809a1d2d"}, + {file = "bcrypt-4.1.2-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e51c42750b7585cee7892c2614be0d14107fad9581d1738d954a262556dd1aab"}, + {file = "bcrypt-4.1.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:ba4e4cc26610581a6329b3937e02d319f5ad4b85b074846bf4fef8a8cf51e7bb"}, + {file = "bcrypt-4.1.2.tar.gz", hash = "sha256:33313a1200a3ae90b75587ceac502b048b840fc69e7f7a0905b5f87fac7a1258"}, ] [package.extras] @@ -201,30 +231,34 @@ files = [ [[package]] name = "black" -version = "23.11.0" +version = "23.12.1" description = "The uncompromising code formatter." 
category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "black-23.11.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dbea0bb8575c6b6303cc65017b46351dc5953eea5c0a59d7b7e3a2d2f433a911"}, - {file = "black-23.11.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:412f56bab20ac85927f3a959230331de5614aecda1ede14b373083f62ec24e6f"}, - {file = "black-23.11.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d136ef5b418c81660ad847efe0e55c58c8208b77a57a28a503a5f345ccf01394"}, - {file = "black-23.11.0-cp310-cp310-win_amd64.whl", hash = "sha256:6c1cac07e64433f646a9a838cdc00c9768b3c362805afc3fce341af0e6a9ae9f"}, - {file = "black-23.11.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cf57719e581cfd48c4efe28543fea3d139c6b6f1238b3f0102a9c73992cbb479"}, - {file = "black-23.11.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:698c1e0d5c43354ec5d6f4d914d0d553a9ada56c85415700b81dc90125aac244"}, - {file = "black-23.11.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:760415ccc20f9e8747084169110ef75d545f3b0932ee21368f63ac0fee86b221"}, - {file = "black-23.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:58e5f4d08a205b11800332920e285bd25e1a75c54953e05502052738fe16b3b5"}, - {file = "black-23.11.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:45aa1d4675964946e53ab81aeec7a37613c1cb71647b5394779e6efb79d6d187"}, - {file = "black-23.11.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4c44b7211a3a0570cc097e81135faa5f261264f4dfaa22bd5ee2875a4e773bd6"}, - {file = "black-23.11.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a9acad1451632021ee0d146c8765782a0c3846e0e0ea46659d7c4f89d9b212b"}, - {file = "black-23.11.0-cp38-cp38-win_amd64.whl", hash = "sha256:fc7f6a44d52747e65a02558e1d807c82df1d66ffa80a601862040a43ec2e3142"}, - {file = "black-23.11.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7f622b6822f02bfaf2a5cd31fdb7cd86fcf33dab6ced5185c35f5db98260b055"}, - {file = 
"black-23.11.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:250d7e60f323fcfc8ea6c800d5eba12f7967400eb6c2d21ae85ad31c204fb1f4"}, - {file = "black-23.11.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5133f5507007ba08d8b7b263c7aa0f931af5ba88a29beacc4b2dc23fcefe9c06"}, - {file = "black-23.11.0-cp39-cp39-win_amd64.whl", hash = "sha256:421f3e44aa67138ab1b9bfbc22ee3780b22fa5b291e4db8ab7eee95200726b07"}, - {file = "black-23.11.0-py3-none-any.whl", hash = "sha256:54caaa703227c6e0c87b76326d0862184729a69b73d3b7305b6288e1d830067e"}, - {file = "black-23.11.0.tar.gz", hash = "sha256:4c68855825ff432d197229846f971bc4d6666ce90492e5b02013bcaca4d9ab05"}, + {file = "black-23.12.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0aaf6041986767a5e0ce663c7a2f0e9eaf21e6ff87a5f95cbf3675bfd4c41d2"}, + {file = "black-23.12.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c88b3711d12905b74206227109272673edce0cb29f27e1385f33b0163c414bba"}, + {file = "black-23.12.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a920b569dc6b3472513ba6ddea21f440d4b4c699494d2e972a1753cdc25df7b0"}, + {file = "black-23.12.1-cp310-cp310-win_amd64.whl", hash = "sha256:3fa4be75ef2a6b96ea8d92b1587dd8cb3a35c7e3d51f0738ced0781c3aa3a5a3"}, + {file = "black-23.12.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8d4df77958a622f9b5a4c96edb4b8c0034f8434032ab11077ec6c56ae9f384ba"}, + {file = "black-23.12.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:602cfb1196dc692424c70b6507593a2b29aac0547c1be9a1d1365f0d964c353b"}, + {file = "black-23.12.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c4352800f14be5b4864016882cdba10755bd50805c95f728011bcb47a4afd59"}, + {file = "black-23.12.1-cp311-cp311-win_amd64.whl", hash = "sha256:0808494f2b2df923ffc5723ed3c7b096bd76341f6213989759287611e9837d50"}, + {file = "black-23.12.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:25e57fd232a6d6ff3f4478a6fd0580838e47c93c83eaf1ccc92d4faf27112c4e"}, + {file = "black-23.12.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2d9e13db441c509a3763a7a3d9a49ccc1b4e974a47be4e08ade2a228876500ec"}, + {file = "black-23.12.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d1bd9c210f8b109b1762ec9fd36592fdd528485aadb3f5849b2740ef17e674e"}, + {file = "black-23.12.1-cp312-cp312-win_amd64.whl", hash = "sha256:ae76c22bde5cbb6bfd211ec343ded2163bba7883c7bc77f6b756a1049436fbb9"}, + {file = "black-23.12.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1fa88a0f74e50e4487477bc0bb900c6781dbddfdfa32691e780bf854c3b4a47f"}, + {file = "black-23.12.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a4d6a9668e45ad99d2f8ec70d5c8c04ef4f32f648ef39048d010b0689832ec6d"}, + {file = "black-23.12.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b18fb2ae6c4bb63eebe5be6bd869ba2f14fd0259bda7d18a46b764d8fb86298a"}, + {file = "black-23.12.1-cp38-cp38-win_amd64.whl", hash = "sha256:c04b6d9d20e9c13f43eee8ea87d44156b8505ca8a3c878773f68b4e4812a421e"}, + {file = "black-23.12.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3e1b38b3135fd4c025c28c55ddfc236b05af657828a8a6abe5deec419a0b7055"}, + {file = "black-23.12.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4f0031eaa7b921db76decd73636ef3a12c942ed367d8c3841a0739412b260a54"}, + {file = "black-23.12.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97e56155c6b737854e60a9ab1c598ff2533d57e7506d97af5481141671abf3ea"}, + {file = "black-23.12.1-cp39-cp39-win_amd64.whl", hash = "sha256:dd15245c8b68fe2b6bd0f32c1556509d11bb33aec9b5d0866dd8e2ed3dba09c2"}, + {file = "black-23.12.1-py3-none-any.whl", hash = "sha256:78baad24af0f033958cad29731e27363183e140962595def56423e626f4bee3e"}, + {file = "black-23.12.1.tar.gz", hash = "sha256:4ce3ef14ebe8d9509188014d96af1c456a910d5b5cbf434a09fef7e024b3d0d5"}, ] [package.dependencies] @@ -238,20 +272,20 @@ 
typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} [package.extras] colorama = ["colorama (>=0.4.3)"] -d = ["aiohttp (>=3.7.4)"] +d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"] jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] uvloop = ["uvloop (>=0.15.2)"] [[package]] name = "celery" -version = "5.3.5" +version = "5.3.6" description = "Distributed Task Queue." category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "celery-5.3.5-py3-none-any.whl", hash = "sha256:30b75ac60fb081c2d9f8881382c148ed7c9052031a75a1e8743ff4b4b071f184"}, - {file = "celery-5.3.5.tar.gz", hash = "sha256:6b65d8dd5db499dd6190c45aa6398e171b99592f2af62c312f7391587feb5458"}, + {file = "celery-5.3.6-py3-none-any.whl", hash = "sha256:9da4ea0118d232ce97dff5ed4974587fb1c0ff5c10042eb15278487cdd27d1af"}, + {file = "celery-5.3.6.tar.gz", hash = "sha256:870cc71d737c0200c397290d730344cc991d13a057534353d124c9380267aab9"}, ] [package.dependencies] @@ -261,7 +295,7 @@ click = ">=8.1.2,<9.0" click-didyoumean = ">=0.3.0" click-plugins = ">=1.1.1" click-repl = ">=0.2.0" -kombu = ">=5.3.3,<6.0" +kombu = ">=5.3.4,<6.0" python-dateutil = ">=2.8.2" tzdata = ">=2022.7" vine = ">=5.1.0,<6.0" @@ -278,7 +312,7 @@ couchbase = ["couchbase (>=3.0.0)"] couchdb = ["pycouchdb (==1.14.2)"] django = ["Django (>=2.2.28)"] dynamodb = ["boto3 (>=1.26.143)"] -elasticsearch = ["elastic-transport (<=8.10.0)", "elasticsearch (<=8.10.1)"] +elasticsearch = ["elastic-transport (<=8.10.0)", "elasticsearch (<=8.11.0)"] eventlet = ["eventlet (>=0.32.0)"] gevent = ["gevent (>=1.5.0)"] librabbitmq = ["librabbitmq (>=2.0.0)"] @@ -557,64 +591,64 @@ files = [ [[package]] name = "coverage" -version = "7.3.2" +version = "7.4.0" description = "Code coverage measurement for Python" category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "coverage-7.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:d872145f3a3231a5f20fd48500274d7df222e291d90baa2026cc5152b7ce86bf"}, - {file = "coverage-7.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:310b3bb9c91ea66d59c53fa4989f57d2436e08f18fb2f421a1b0b6b8cc7fffda"}, - {file = "coverage-7.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f47d39359e2c3779c5331fc740cf4bce6d9d680a7b4b4ead97056a0ae07cb49a"}, - {file = "coverage-7.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aa72dbaf2c2068404b9870d93436e6d23addd8bbe9295f49cbca83f6e278179c"}, - {file = "coverage-7.3.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:beaa5c1b4777f03fc63dfd2a6bd820f73f036bfb10e925fce067b00a340d0f3f"}, - {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:dbc1b46b92186cc8074fee9d9fbb97a9dd06c6cbbef391c2f59d80eabdf0faa6"}, - {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:315a989e861031334d7bee1f9113c8770472db2ac484e5b8c3173428360a9148"}, - {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d1bc430677773397f64a5c88cb522ea43175ff16f8bfcc89d467d974cb2274f9"}, - {file = "coverage-7.3.2-cp310-cp310-win32.whl", hash = "sha256:a889ae02f43aa45032afe364c8ae84ad3c54828c2faa44f3bfcafecb5c96b02f"}, - {file = "coverage-7.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:c0ba320de3fb8c6ec16e0be17ee1d3d69adcda99406c43c0409cb5c41788a611"}, - {file = "coverage-7.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ac8c802fa29843a72d32ec56d0ca792ad15a302b28ca6203389afe21f8fa062c"}, - {file = "coverage-7.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:89a937174104339e3a3ffcf9f446c00e3a806c28b1841c63edb2b369310fd074"}, - {file = "coverage-7.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e267e9e2b574a176ddb983399dec325a80dbe161f1a32715c780b5d14b5f583a"}, - {file = 
"coverage-7.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2443cbda35df0d35dcfb9bf8f3c02c57c1d6111169e3c85fc1fcc05e0c9f39a3"}, - {file = "coverage-7.3.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4175e10cc8dda0265653e8714b3174430b07c1dca8957f4966cbd6c2b1b8065a"}, - {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0cbf38419fb1a347aaf63481c00f0bdc86889d9fbf3f25109cf96c26b403fda1"}, - {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:5c913b556a116b8d5f6ef834038ba983834d887d82187c8f73dec21049abd65c"}, - {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1981f785239e4e39e6444c63a98da3a1db8e971cb9ceb50a945ba6296b43f312"}, - {file = "coverage-7.3.2-cp311-cp311-win32.whl", hash = "sha256:43668cabd5ca8258f5954f27a3aaf78757e6acf13c17604d89648ecc0cc66640"}, - {file = "coverage-7.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10c39c0452bf6e694511c901426d6b5ac005acc0f78ff265dbe36bf81f808a2"}, - {file = "coverage-7.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:4cbae1051ab791debecc4a5dcc4a1ff45fc27b91b9aee165c8a27514dd160836"}, - {file = "coverage-7.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:12d15ab5833a997716d76f2ac1e4b4d536814fc213c85ca72756c19e5a6b3d63"}, - {file = "coverage-7.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c7bba973ebee5e56fe9251300c00f1579652587a9f4a5ed8404b15a0471f216"}, - {file = "coverage-7.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fe494faa90ce6381770746077243231e0b83ff3f17069d748f645617cefe19d4"}, - {file = "coverage-7.3.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6e9589bd04d0461a417562649522575d8752904d35c12907d8c9dfeba588faf"}, - {file = 
"coverage-7.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d51ac2a26f71da1b57f2dc81d0e108b6ab177e7d30e774db90675467c847bbdf"}, - {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:99b89d9f76070237975b315b3d5f4d6956ae354a4c92ac2388a5695516e47c84"}, - {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fa28e909776dc69efb6ed975a63691bc8172b64ff357e663a1bb06ff3c9b589a"}, - {file = "coverage-7.3.2-cp312-cp312-win32.whl", hash = "sha256:289fe43bf45a575e3ab10b26d7b6f2ddb9ee2dba447499f5401cfb5ecb8196bb"}, - {file = "coverage-7.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:7dbc3ed60e8659bc59b6b304b43ff9c3ed858da2839c78b804973f613d3e92ed"}, - {file = "coverage-7.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f94b734214ea6a36fe16e96a70d941af80ff3bfd716c141300d95ebc85339738"}, - {file = "coverage-7.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:af3d828d2c1cbae52d34bdbb22fcd94d1ce715d95f1a012354a75e5913f1bda2"}, - {file = "coverage-7.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:630b13e3036e13c7adc480ca42fa7afc2a5d938081d28e20903cf7fd687872e2"}, - {file = "coverage-7.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c9eacf273e885b02a0273bb3a2170f30e2d53a6d53b72dbe02d6701b5296101c"}, - {file = "coverage-7.3.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8f17966e861ff97305e0801134e69db33b143bbfb36436efb9cfff6ec7b2fd9"}, - {file = "coverage-7.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b4275802d16882cf9c8b3d057a0839acb07ee9379fa2749eca54efbce1535b82"}, - {file = "coverage-7.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:72c0cfa5250f483181e677ebc97133ea1ab3eb68645e494775deb6a7f6f83901"}, - {file = "coverage-7.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:cb536f0dcd14149425996821a168f6e269d7dcd2c273a8bff8201e79f5104e76"}, - {file = "coverage-7.3.2-cp38-cp38-win32.whl", hash = "sha256:307adb8bd3abe389a471e649038a71b4eb13bfd6b7dd9a129fa856f5c695cf92"}, - {file = "coverage-7.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:88ed2c30a49ea81ea3b7f172e0269c182a44c236eb394718f976239892c0a27a"}, - {file = "coverage-7.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b631c92dfe601adf8f5ebc7fc13ced6bb6e9609b19d9a8cd59fa47c4186ad1ce"}, - {file = "coverage-7.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d3d9df4051c4a7d13036524b66ecf7a7537d14c18a384043f30a303b146164e9"}, - {file = "coverage-7.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f7363d3b6a1119ef05015959ca24a9afc0ea8a02c687fe7e2d557705375c01f"}, - {file = "coverage-7.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2f11cc3c967a09d3695d2a6f03fb3e6236622b93be7a4b5dc09166a861be6d25"}, - {file = "coverage-7.3.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:149de1d2401ae4655c436a3dced6dd153f4c3309f599c3d4bd97ab172eaf02d9"}, - {file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3a4006916aa6fee7cd38db3bfc95aa9c54ebb4ffbfc47c677c8bba949ceba0a6"}, - {file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9028a3871280110d6e1aa2df1afd5ef003bab5fb1ef421d6dc748ae1c8ef2ebc"}, - {file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9f805d62aec8eb92bab5b61c0f07329275b6f41c97d80e847b03eb894f38d083"}, - {file = "coverage-7.3.2-cp39-cp39-win32.whl", hash = "sha256:d1c88ec1a7ff4ebca0219f5b1ef863451d828cccf889c173e1253aa84b1e07ce"}, - {file = "coverage-7.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b4767da59464bb593c07afceaddea61b154136300881844768037fd5e859353f"}, - {file = "coverage-7.3.2-pp38.pp39.pp310-none-any.whl", hash = 
"sha256:ae97af89f0fbf373400970c0a21eef5aa941ffeed90aee43650b81f7d7f47637"}, - {file = "coverage-7.3.2.tar.gz", hash = "sha256:be32ad29341b0170e795ca590e1c07e81fc061cb5b10c74ce7203491484404ef"}, + {file = "coverage-7.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36b0ea8ab20d6a7564e89cb6135920bc9188fb5f1f7152e94e8300b7b189441a"}, + {file = "coverage-7.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0676cd0ba581e514b7f726495ea75aba3eb20899d824636c6f59b0ed2f88c471"}, + {file = "coverage-7.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0ca5c71a5a1765a0f8f88022c52b6b8be740e512980362f7fdbb03725a0d6b9"}, + {file = "coverage-7.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7c97726520f784239f6c62506bc70e48d01ae71e9da128259d61ca5e9788516"}, + {file = "coverage-7.4.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:815ac2d0f3398a14286dc2cea223a6f338109f9ecf39a71160cd1628786bc6f5"}, + {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:80b5ee39b7f0131ebec7968baa9b2309eddb35b8403d1869e08f024efd883566"}, + {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5b2ccb7548a0b65974860a78c9ffe1173cfb5877460e5a229238d985565574ae"}, + {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:995ea5c48c4ebfd898eacb098164b3cc826ba273b3049e4a889658548e321b43"}, + {file = "coverage-7.4.0-cp310-cp310-win32.whl", hash = "sha256:79287fd95585ed36e83182794a57a46aeae0b64ca53929d1176db56aacc83451"}, + {file = "coverage-7.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:5b14b4f8760006bfdb6e08667af7bc2d8d9bfdb648351915315ea17645347137"}, + {file = "coverage-7.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:04387a4a6ecb330c1878907ce0dc04078ea72a869263e53c72a1ba5bbdf380ca"}, + {file = "coverage-7.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:ea81d8f9691bb53f4fb4db603203029643caffc82bf998ab5b59ca05560f4c06"}, + {file = "coverage-7.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74775198b702868ec2d058cb92720a3c5a9177296f75bd97317c787daf711505"}, + {file = "coverage-7.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76f03940f9973bfaee8cfba70ac991825611b9aac047e5c80d499a44079ec0bc"}, + {file = "coverage-7.4.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:485e9f897cf4856a65a57c7f6ea3dc0d4e6c076c87311d4bc003f82cfe199d25"}, + {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6ae8c9d301207e6856865867d762a4b6fd379c714fcc0607a84b92ee63feff70"}, + {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bf477c355274a72435ceb140dc42de0dc1e1e0bf6e97195be30487d8eaaf1a09"}, + {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:83c2dda2666fe32332f8e87481eed056c8b4d163fe18ecc690b02802d36a4d26"}, + {file = "coverage-7.4.0-cp311-cp311-win32.whl", hash = "sha256:697d1317e5290a313ef0d369650cfee1a114abb6021fa239ca12b4849ebbd614"}, + {file = "coverage-7.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:26776ff6c711d9d835557ee453082025d871e30b3fd6c27fcef14733f67f0590"}, + {file = "coverage-7.4.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:13eaf476ec3e883fe3e5fe3707caeb88268a06284484a3daf8250259ef1ba143"}, + {file = "coverage-7.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846f52f46e212affb5bcf131c952fb4075b55aae6b61adc9856222df89cbe3e2"}, + {file = "coverage-7.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26f66da8695719ccf90e794ed567a1549bb2644a706b41e9f6eae6816b398c4a"}, + {file = "coverage-7.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:164fdcc3246c69a6526a59b744b62e303039a81e42cfbbdc171c91a8cc2f9446"}, + {file = "coverage-7.4.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:316543f71025a6565677d84bc4df2114e9b6a615aa39fb165d697dba06a54af9"}, + {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bb1de682da0b824411e00a0d4da5a784ec6496b6850fdf8c865c1d68c0e318dd"}, + {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:0e8d06778e8fbffccfe96331a3946237f87b1e1d359d7fbe8b06b96c95a5407a"}, + {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a56de34db7b7ff77056a37aedded01b2b98b508227d2d0979d373a9b5d353daa"}, + {file = "coverage-7.4.0-cp312-cp312-win32.whl", hash = "sha256:51456e6fa099a8d9d91497202d9563a320513fcf59f33991b0661a4a6f2ad450"}, + {file = "coverage-7.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:cd3c1e4cb2ff0083758f09be0f77402e1bdf704adb7f89108007300a6da587d0"}, + {file = "coverage-7.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e9d1bf53c4c8de58d22e0e956a79a5b37f754ed1ffdbf1a260d9dcfa2d8a325e"}, + {file = "coverage-7.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:109f5985182b6b81fe33323ab4707011875198c41964f014579cf82cebf2bb85"}, + {file = "coverage-7.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cc9d4bc55de8003663ec94c2f215d12d42ceea128da8f0f4036235a119c88ac"}, + {file = "coverage-7.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cc6d65b21c219ec2072c1293c505cf36e4e913a3f936d80028993dd73c7906b1"}, + {file = "coverage-7.4.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a10a4920def78bbfff4eff8a05c51be03e42f1c3735be42d851f199144897ba"}, + {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:b8e99f06160602bc64da35158bb76c73522a4010f0649be44a4e167ff8555952"}, + {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:7d360587e64d006402b7116623cebf9d48893329ef035278969fa3bbf75b697e"}, + {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:29f3abe810930311c0b5d1a7140f6395369c3db1be68345638c33eec07535105"}, + {file = "coverage-7.4.0-cp38-cp38-win32.whl", hash = "sha256:5040148f4ec43644702e7b16ca864c5314ccb8ee0751ef617d49aa0e2d6bf4f2"}, + {file = "coverage-7.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:9864463c1c2f9cb3b5db2cf1ff475eed2f0b4285c2aaf4d357b69959941aa555"}, + {file = "coverage-7.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:936d38794044b26c99d3dd004d8af0035ac535b92090f7f2bb5aa9c8e2f5cd42"}, + {file = "coverage-7.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:799c8f873794a08cdf216aa5d0531c6a3747793b70c53f70e98259720a6fe2d7"}, + {file = "coverage-7.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e7defbb9737274023e2d7af02cac77043c86ce88a907c58f42b580a97d5bcca9"}, + {file = "coverage-7.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a1526d265743fb49363974b7aa8d5899ff64ee07df47dd8d3e37dcc0818f09ed"}, + {file = "coverage-7.4.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf635a52fc1ea401baf88843ae8708591aa4adff875e5c23220de43b1ccf575c"}, + {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:756ded44f47f330666843b5781be126ab57bb57c22adbb07d83f6b519783b870"}, + {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:0eb3c2f32dabe3a4aaf6441dde94f35687224dfd7eb2a7f47f3fd9428e421058"}, + {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bfd5db349d15c08311702611f3dccbef4b4e2ec148fcc636cf8739519b4a5c0f"}, + {file = "coverage-7.4.0-cp39-cp39-win32.whl", hash = 
"sha256:53d7d9158ee03956e0eadac38dfa1ec8068431ef8058fe6447043db1fb40d932"}, + {file = "coverage-7.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:cfd2a8b6b0d8e66e944d47cdec2f47c48fef2ba2f2dff5a9a75757f64172857e"}, + {file = "coverage-7.4.0-pp38.pp39.pp310-none-any.whl", hash = "sha256:c530833afc4707fe48524a44844493f36d8727f04dcce91fb978c414a8556cc6"}, + {file = "coverage-7.4.0.tar.gz", hash = "sha256:707c0f58cb1712b8809ece32b68996ee1e609f71bd14615bd8f87a1293cb610e"}, ] [package.extras] @@ -636,35 +670,35 @@ dev = ["polib"] [[package]] name = "cryptography" -version = "41.0.5" +version = "41.0.7" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "cryptography-41.0.5-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:da6a0ff8f1016ccc7477e6339e1d50ce5f59b88905585f77193ebd5068f1e797"}, - {file = "cryptography-41.0.5-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:b948e09fe5fb18517d99994184854ebd50b57248736fd4c720ad540560174ec5"}, - {file = "cryptography-41.0.5-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d38e6031e113b7421db1de0c1b1f7739564a88f1684c6b89234fbf6c11b75147"}, - {file = "cryptography-41.0.5-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e270c04f4d9b5671ebcc792b3ba5d4488bf7c42c3c241a3748e2599776f29696"}, - {file = "cryptography-41.0.5-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ec3b055ff8f1dce8e6ef28f626e0972981475173d7973d63f271b29c8a2897da"}, - {file = "cryptography-41.0.5-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:7d208c21e47940369accfc9e85f0de7693d9a5d843c2509b3846b2db170dfd20"}, - {file = "cryptography-41.0.5-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:8254962e6ba1f4d2090c44daf50a547cd5f0bf446dc658a8e5f8156cae0d8548"}, - {file = "cryptography-41.0.5-cp37-abi3-musllinux_1_1_x86_64.whl", hash = 
"sha256:a48e74dad1fb349f3dc1d449ed88e0017d792997a7ad2ec9587ed17405667e6d"}, - {file = "cryptography-41.0.5-cp37-abi3-win32.whl", hash = "sha256:d3977f0e276f6f5bf245c403156673db103283266601405376f075c849a0b936"}, - {file = "cryptography-41.0.5-cp37-abi3-win_amd64.whl", hash = "sha256:73801ac9736741f220e20435f84ecec75ed70eda90f781a148f1bad546963d81"}, - {file = "cryptography-41.0.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3be3ca726e1572517d2bef99a818378bbcf7d7799d5372a46c79c29eb8d166c1"}, - {file = "cryptography-41.0.5-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e886098619d3815e0ad5790c973afeee2c0e6e04b4da90b88e6bd06e2a0b1b72"}, - {file = "cryptography-41.0.5-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:573eb7128cbca75f9157dcde974781209463ce56b5804983e11a1c462f0f4e88"}, - {file = "cryptography-41.0.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:0c327cac00f082013c7c9fb6c46b7cc9fa3c288ca702c74773968173bda421bf"}, - {file = "cryptography-41.0.5-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:227ec057cd32a41c6651701abc0328135e472ed450f47c2766f23267b792a88e"}, - {file = "cryptography-41.0.5-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:22892cc830d8b2c89ea60148227631bb96a7da0c1b722f2aac8824b1b7c0b6b8"}, - {file = "cryptography-41.0.5-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:5a70187954ba7292c7876734183e810b728b4f3965fbe571421cb2434d279179"}, - {file = "cryptography-41.0.5-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:88417bff20162f635f24f849ab182b092697922088b477a7abd6664ddd82291d"}, - {file = "cryptography-41.0.5-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c707f7afd813478e2019ae32a7c49cd932dd60ab2d2a93e796f68236b7e1fbf1"}, - {file = "cryptography-41.0.5-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:580afc7b7216deeb87a098ef0674d6ee34ab55993140838b14c9b83312b37b86"}, - {file = "cryptography-41.0.5-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", 
hash = "sha256:fba1e91467c65fe64a82c689dc6cf58151158993b13eb7a7f3f4b7f395636723"}, - {file = "cryptography-41.0.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:0d2a6a598847c46e3e321a7aef8af1436f11c27f1254933746304ff014664d84"}, - {file = "cryptography-41.0.5.tar.gz", hash = "sha256:392cb88b597247177172e02da6b7a63deeff1937fa6fec3bbf902ebd75d97ec7"}, + {file = "cryptography-41.0.7-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:3c78451b78313fa81607fa1b3f1ae0a5ddd8014c38a02d9db0616133987b9cdf"}, + {file = "cryptography-41.0.7-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:928258ba5d6f8ae644e764d0f996d61a8777559f72dfeb2eea7e2fe0ad6e782d"}, + {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a1b41bc97f1ad230a41657d9155113c7521953869ae57ac39ac7f1bb471469a"}, + {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:841df4caa01008bad253bce2a6f7b47f86dc9f08df4b433c404def869f590a15"}, + {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5429ec739a29df2e29e15d082f1d9ad683701f0ec7709ca479b3ff2708dae65a"}, + {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:43f2552a2378b44869fe8827aa19e69512e3245a219104438692385b0ee119d1"}, + {file = "cryptography-41.0.7-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:af03b32695b24d85a75d40e1ba39ffe7db7ffcb099fe507b39fd41a565f1b157"}, + {file = "cryptography-41.0.7-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:49f0805fc0b2ac8d4882dd52f4a3b935b210935d500b6b805f321addc8177406"}, + {file = "cryptography-41.0.7-cp37-abi3-win32.whl", hash = "sha256:f983596065a18a2183e7f79ab3fd4c475205b839e02cbc0efbbf9666c4b3083d"}, + {file = "cryptography-41.0.7-cp37-abi3-win_amd64.whl", hash = "sha256:90452ba79b8788fa380dfb587cca692976ef4e757b194b093d845e8d99f612f2"}, + {file = "cryptography-41.0.7-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = 
"sha256:079b85658ea2f59c4f43b70f8119a52414cdb7be34da5d019a77bf96d473b960"}, + {file = "cryptography-41.0.7-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:b640981bf64a3e978a56167594a0e97db71c89a479da8e175d8bb5be5178c003"}, + {file = "cryptography-41.0.7-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e3114da6d7f95d2dee7d3f4eec16dacff819740bbab931aff8648cb13c5ff5e7"}, + {file = "cryptography-41.0.7-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d5ec85080cce7b0513cfd233914eb8b7bbd0633f1d1703aa28d1dd5a72f678ec"}, + {file = "cryptography-41.0.7-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7a698cb1dac82c35fcf8fe3417a3aaba97de16a01ac914b89a0889d364d2f6be"}, + {file = "cryptography-41.0.7-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:37a138589b12069efb424220bf78eac59ca68b95696fc622b6ccc1c0a197204a"}, + {file = "cryptography-41.0.7-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:68a2dec79deebc5d26d617bfdf6e8aab065a4f34934b22d3b5010df3ba36612c"}, + {file = "cryptography-41.0.7-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:09616eeaef406f99046553b8a40fbf8b1e70795a91885ba4c96a70793de5504a"}, + {file = "cryptography-41.0.7-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:48a0476626da912a44cc078f9893f292f0b3e4c739caf289268168d8f4702a39"}, + {file = "cryptography-41.0.7-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c7f3201ec47d5207841402594f1d7950879ef890c0c495052fa62f58283fde1a"}, + {file = "cryptography-41.0.7-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c5ca78485a255e03c32b513f8c2bc39fedb7f5c5f8535545bdc223a03b24f248"}, + {file = "cryptography-41.0.7-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:d6c391c021ab1f7a82da5d8d0b3cee2f4b2c455ec86c8aebbc84837a631ff309"}, + {file = "cryptography-41.0.7.tar.gz", hash = "sha256:13f93ce9bea8016c253b34afc6bd6a75993e5c40672ed5405a9c832f0d4a00bc"}, ] [package.dependencies] @@ -692,6 +726,28 @@ files = [ {file = 
"defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"}, ] +[[package]] +name = "diffsync" +version = "1.10.0" +description = "Library to easily sync/diff/update 2 different data sources" +category = "main" +optional = false +python-versions = ">=3.8,<4.0" +files = [ + {file = "diffsync-1.10.0-py3-none-any.whl", hash = "sha256:f4368c97162d51eecc7a8e87026c731197a694026cabcf2ab4f16d18d7bdadbd"}, + {file = "diffsync-1.10.0.tar.gz", hash = "sha256:a9d7cb8e8ce983b446bf858c1c5c82cf473fcf231db73c0855e8c59ee7cd8370"}, +] + +[package.dependencies] +colorama = ">=0.4.3,<0.5.0" +packaging = ">=21.3,<24.0" +pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0" +structlog = ">=20.1.0,<23.0.0" +typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} + +[package.extras] +redis = ["redis (>=4.3,<5.0)"] + [[package]] name = "dill" version = "0.3.7" @@ -1009,14 +1065,14 @@ pytz = "*" [[package]] name = "django-tree-queries" -version = "0.15.0" +version = "0.16.1" description = "Tree queries with explicit opt-in, without configurability" category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "django_tree_queries-0.15.0-py3-none-any.whl", hash = "sha256:cf11340de59d3122919fde46e99966bad40ff942df768d683383b111554134a1"}, - {file = "django_tree_queries-0.15.0.tar.gz", hash = "sha256:0e994c2a4601c021a115a397ec8d0ff7d5e614fae95947f72126e6a419c60f08"}, + {file = "django_tree_queries-0.16.1-py3-none-any.whl", hash = "sha256:b57cebd85136897dc2d7d1da50f3944b13d4713009af655ae221c8202146c2f5"}, + {file = "django_tree_queries-0.16.1.tar.gz", hash = "sha256:5a7765bdbc78742ae7b206348aa674a7e39ef38069ac3854a51b330d25081c43"}, ] [package.extras] @@ -1102,14 +1158,14 @@ sidecar = ["drf-spectacular-sidecar"] [[package]] name = "drf-spectacular-sidecar" -version = "2023.10.1" +version = "2024.1.1" description = "Serve self-contained distribution builds of Swagger UI 
and Redoc with Django" category = "main" optional = false python-versions = ">=3.6" files = [ - {file = "drf-spectacular-sidecar-2023.10.1.tar.gz", hash = "sha256:546a83c173589715e530fad211af60cbcda2db54eb9e0935d44251639332af6d"}, - {file = "drf_spectacular_sidecar-2023.10.1-py3-none-any.whl", hash = "sha256:3d042a6772512f4d238f0385d3430acf5f669f595fd0be2641fe6bbfb4c7b376"}, + {file = "drf-spectacular-sidecar-2024.1.1.tar.gz", hash = "sha256:099ec58b6af6a90e851a9329b12a57aa1ee7daa6cef62fb504f2ed302f10da76"}, + {file = "drf_spectacular_sidecar-2024.1.1-py3-none-any.whl", hash = "sha256:4b9e33b4dcfa43f84e3db2659d31766a018a2b98b02d8856d9cd69580a4911c9"}, ] [package.dependencies] @@ -1130,6 +1186,21 @@ files = [ [package.extras] dev = ["coverage", "coveralls", "pytest"] +[[package]] +name = "exceptiongroup" +version = "1.2.0" +description = "Backport of PEP 654 (exception groups)" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, + {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, +] + +[package.extras] +test = ["pytest (>=6)"] + [[package]] name = "flake8" version = "5.0.4" @@ -1193,21 +1264,21 @@ smmap = ">=3.0.1,<6" [[package]] name = "gitpython" -version = "3.1.40" +version = "3.1.41" description = "GitPython is a Python library used to interact with Git repositories" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "GitPython-3.1.40-py3-none-any.whl", hash = "sha256:cf14627d5a8049ffbf49915732e5eddbe8134c3bdb9d476e6182b676fc573f8a"}, - {file = "GitPython-3.1.40.tar.gz", hash = "sha256:22b126e9ffb671fdd0c129796343a02bf67bf2994b35449ffc9321aa755e18a4"}, + {file = "GitPython-3.1.41-py3-none-any.whl", hash = "sha256:c36b6634d069b3f719610175020a9aed919421c87552185b085e04fbbdb10b7c"}, + {file = 
"GitPython-3.1.41.tar.gz", hash = "sha256:ed66e624884f76df22c8e16066d567aaa5a37d5b5fa19db2c6df6f7156db9048"}, ] [package.dependencies] gitdb = ">=4.0.1,<5" [package.extras] -test = ["black", "coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mock", "mypy", "pre-commit", "pytest", "pytest-cov", "pytest-instafail", "pytest-subtests", "pytest-sugar"] +test = ["black", "coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mock", "mypy", "pre-commit", "pytest (>=7.3.1)", "pytest-cov", "pytest-instafail", "pytest-mock", "pytest-sugar", "sumtypes"] [[package]] name = "graphene" @@ -1308,50 +1379,108 @@ six = ">=1.12" [[package]] name = "griffe" -version = "0.38.0" +version = "0.39.1" description = "Signatures for entire Python programs. Extract the structure, the frame, the skeleton of your project, to generate API documentation or find breaking changes in your API." category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "griffe-0.38.0-py3-none-any.whl", hash = "sha256:6a5bc457320e8e199006aa5fbb03e162f5e21abe31aa6221f7a5c37ea0724c71"}, - {file = "griffe-0.38.0.tar.gz", hash = "sha256:9b97487b583042b543d1e28196caee638ecd766c8c4c98135071806cb5333ac2"}, + {file = "griffe-0.39.1-py3-none-any.whl", hash = "sha256:6ce4ecffcf0d2f96362c5974b3f7df812da8f8d4cfcc5ebc8202ef72656fc087"}, + {file = "griffe-0.39.1.tar.gz", hash = "sha256:ead8dfede6e6531cce6bf69090a4f3c6d36fdf923c43f8e85aa530552cef0c09"}, ] [package.dependencies] colorama = ">=0.4" +[[package]] +name = "h11" +version = "0.14.0" +description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, + {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, +] + +[[package]] +name = "httpcore" +version = "0.17.3" +description = "A minimal low-level HTTP 
client." +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "httpcore-0.17.3-py3-none-any.whl", hash = "sha256:c2789b767ddddfa2a5782e3199b2b7f6894540b17b16ec26b2c4d8e103510b87"}, + {file = "httpcore-0.17.3.tar.gz", hash = "sha256:a6f30213335e34c1ade7be6ec7c47f19f50c56db36abef1a9dfa3815b1cb3888"}, +] + +[package.dependencies] +anyio = ">=3.0,<5.0" +certifi = "*" +h11 = ">=0.13,<0.15" +sniffio = ">=1.0.0,<2.0.0" + +[package.extras] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (>=1.0.0,<2.0.0)"] + +[[package]] +name = "httpx" +version = "0.24.1" +description = "The next generation HTTP client." +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "httpx-0.24.1-py3-none-any.whl", hash = "sha256:06781eb9ac53cde990577af654bd990a4949de37a28bdb4a230d434f3a30b9bd"}, + {file = "httpx-0.24.1.tar.gz", hash = "sha256:5853a43053df830c20f8110c5e69fe44d035d850b2dfe795e196f00fdb774bdd"}, +] + +[package.dependencies] +certifi = "*" +httpcore = ">=0.15.0,<0.18.0" +idna = "*" +sniffio = "*" + +[package.extras] +brotli = ["brotli", "brotlicffi"] +cli = ["click (>=8.0.0,<9.0.0)", "pygments (>=2.0.0,<3.0.0)", "rich (>=10,<14)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (>=1.0.0,<2.0.0)"] + [[package]] name = "idna" -version = "3.4" +version = "3.6" description = "Internationalized Domain Names in Applications (IDNA)" category = "main" optional = false python-versions = ">=3.5" files = [ - {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, - {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, + {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, + {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, ] [[package]] name = "importlib-metadata" -version = "6.8.0" +version = 
"4.13.0" description = "Read metadata from Python packages" category = "main" optional = false -python-versions = ">=3.8" +python-versions = ">=3.7" files = [ - {file = "importlib_metadata-6.8.0-py3-none-any.whl", hash = "sha256:3ebb78df84a805d7698245025b975d9d67053cd94c79245ba4b3eb694abe68bb"}, - {file = "importlib_metadata-6.8.0.tar.gz", hash = "sha256:dbace7892d8c0c4ac1ad096662232f831d4e64f4c4545bd53016a3e9d4654743"}, + {file = "importlib_metadata-4.13.0-py3-none-any.whl", hash = "sha256:8a8a81bcf996e74fee46f0d16bd3eaa382a7eb20fd82445c3ad11f4090334116"}, + {file = "importlib_metadata-4.13.0.tar.gz", hash = "sha256:dd0173e8f150d6815e098fd354f6414b0f079af4644ddfe90c71e2fc6174346d"}, ] [package.dependencies] zipp = ">=0.5" [package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)"] perf = ["ipython"] -testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] +testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)"] [[package]] name = "importlib-resources" @@ -1398,32 +1527,29 @@ files = [ [[package]] name = "isort" -version = "5.12.0" +version = "5.13.2" description = "A Python utility / library to sort Python imports." 
category = "dev" optional = false python-versions = ">=3.8.0" files = [ - {file = "isort-5.12.0-py3-none-any.whl", hash = "sha256:f84c2818376e66cf843d497486ea8fed8700b340f308f076c6fb1229dff318b6"}, - {file = "isort-5.12.0.tar.gz", hash = "sha256:8bef7dde241278824a6d83f44a544709b065191b95b6e50894bdc722fcba0504"}, + {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, + {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, ] [package.extras] -colors = ["colorama (>=0.4.3)"] -pipfile-deprecated-finder = ["pip-shims (>=0.5.2)", "pipreqs", "requirementslib"] -plugins = ["setuptools"] -requirements-deprecated-finder = ["pip-api", "pipreqs"] +colors = ["colorama (>=0.4.6)"] [[package]] name = "jinja2" -version = "3.1.2" +version = "3.1.3" description = "A very fast and expressive template engine." category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, - {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, + {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, + {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, ] [package.dependencies] @@ -1458,14 +1584,14 @@ format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339- [[package]] name = "jsonschema-specifications" -version = "2023.11.1" +version = "2023.12.1" description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "jsonschema_specifications-2023.11.1-py3-none-any.whl", hash = 
"sha256:f596778ab612b3fd29f72ea0d990393d0540a5aab18bf0407a46632eab540779"}, - {file = "jsonschema_specifications-2023.11.1.tar.gz", hash = "sha256:c9b234904ffe02f079bf91b14d79987faa685fd4b39c377a0996954c0090b9ca"}, + {file = "jsonschema_specifications-2023.12.1-py3-none-any.whl", hash = "sha256:87e4fdf3a94858b8a2ba2778d9ba57d8a9cafca7c7489c46ba0d30a8bc6a9c3c"}, + {file = "jsonschema_specifications-2023.12.1.tar.gz", hash = "sha256:48a76787b3e70f5ed53f1160d2b81f586e4ca6d1548c5de7085d1682674764cc"}, ] [package.dependencies] @@ -1474,20 +1600,20 @@ referencing = ">=0.31.0" [[package]] name = "junos-eznc" -version = "2.6.8" +version = "2.7.0" description = "Junos 'EZ' automation for non-programmers" category = "main" optional = false -python-versions = ">=3.5, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=3.8" files = [ - {file = "junos-eznc-2.6.8.tar.gz", hash = "sha256:80772346552225b78b6e9812bc791f67735b7e76e753dea5b7cfe888ef40e0a1"}, - {file = "junos_eznc-2.6.8-py2.py3-none-any.whl", hash = "sha256:0a62b1358d69a1eb8cdf13fee777e91b850da44a3ecf4b02c75995f5bde66f21"}, + {file = "junos-eznc-2.7.0.tar.gz", hash = "sha256:a45c90641d24ff4c86796418ea76ca64066c06d0bf644d6b77e605bf957c5c7d"}, + {file = "junos_eznc-2.7.0-py2.py3-none-any.whl", hash = "sha256:27a665957b49cf4caec2047e33b1b62f3a3ece72a244d0b98e93df9c26c984a6"}, ] [package.dependencies] jinja2 = ">=2.7.1" lxml = ">=3.2.4" -ncclient = "0.6.13" +ncclient = ">=0.6.15" paramiko = ">=1.15.2" pyparsing = "*" pyserial = "*" @@ -1499,14 +1625,14 @@ yamlordereddictloader = "*" [[package]] name = "kombu" -version = "5.3.4" +version = "5.3.5" description = "Messaging library for Python." 
category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "kombu-5.3.4-py3-none-any.whl", hash = "sha256:63bb093fc9bb80cfb3a0972336a5cec1fa7ac5f9ef7e8237c6bf8dda9469313e"}, - {file = "kombu-5.3.4.tar.gz", hash = "sha256:0bb2e278644d11dea6272c17974a3dbb9688a949f3bb60aeb5b791329c44fadc"}, + {file = "kombu-5.3.5-py3-none-any.whl", hash = "sha256:0eac1bbb464afe6fb0924b21bf79460416d25d8abc52546d4f16cad94f789488"}, + {file = "kombu-5.3.5.tar.gz", hash = "sha256:30e470f1a6b49c70dc6f6d13c3e4cc4e178aa6c469ceb6bcd55645385fc84b93"}, ] [package.dependencies] @@ -1534,157 +1660,144 @@ zookeeper = ["kazoo (>=2.8.0)"] [[package]] name = "lazy-object-proxy" -version = "1.9.0" +version = "1.10.0" description = "A fast and thorough lazy object proxy." category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "lazy-object-proxy-1.9.0.tar.gz", hash = "sha256:659fb5809fa4629b8a1ac5106f669cfc7bef26fbb389dda53b3e010d1ac4ebae"}, - {file = "lazy_object_proxy-1.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b40387277b0ed2d0602b8293b94d7257e17d1479e257b4de114ea11a8cb7f2d7"}, - {file = "lazy_object_proxy-1.9.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8c6cfb338b133fbdbc5cfaa10fe3c6aeea827db80c978dbd13bc9dd8526b7d4"}, - {file = "lazy_object_proxy-1.9.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:721532711daa7db0d8b779b0bb0318fa87af1c10d7fe5e52ef30f8eff254d0cd"}, - {file = "lazy_object_proxy-1.9.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:66a3de4a3ec06cd8af3f61b8e1ec67614fbb7c995d02fa224813cb7afefee701"}, - {file = "lazy_object_proxy-1.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1aa3de4088c89a1b69f8ec0dcc169aa725b0ff017899ac568fe44ddc1396df46"}, - {file = "lazy_object_proxy-1.9.0-cp310-cp310-win32.whl", hash = 
"sha256:f0705c376533ed2a9e5e97aacdbfe04cecd71e0aa84c7c0595d02ef93b6e4455"}, - {file = "lazy_object_proxy-1.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:ea806fd4c37bf7e7ad82537b0757999264d5f70c45468447bb2b91afdbe73a6e"}, - {file = "lazy_object_proxy-1.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:946d27deaff6cf8452ed0dba83ba38839a87f4f7a9732e8f9fd4107b21e6ff07"}, - {file = "lazy_object_proxy-1.9.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79a31b086e7e68b24b99b23d57723ef7e2c6d81ed21007b6281ebcd1688acb0a"}, - {file = "lazy_object_proxy-1.9.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f699ac1c768270c9e384e4cbd268d6e67aebcfae6cd623b4d7c3bfde5a35db59"}, - {file = "lazy_object_proxy-1.9.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bfb38f9ffb53b942f2b5954e0f610f1e721ccebe9cce9025a38c8ccf4a5183a4"}, - {file = "lazy_object_proxy-1.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:189bbd5d41ae7a498397287c408617fe5c48633e7755287b21d741f7db2706a9"}, - {file = "lazy_object_proxy-1.9.0-cp311-cp311-win32.whl", hash = "sha256:81fc4d08b062b535d95c9ea70dbe8a335c45c04029878e62d744bdced5141586"}, - {file = "lazy_object_proxy-1.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:f2457189d8257dd41ae9b434ba33298aec198e30adf2dcdaaa3a28b9994f6adb"}, - {file = "lazy_object_proxy-1.9.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d9e25ef10a39e8afe59a5c348a4dbf29b4868ab76269f81ce1674494e2565a6e"}, - {file = "lazy_object_proxy-1.9.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cbf9b082426036e19c6924a9ce90c740a9861e2bdc27a4834fd0a910742ac1e8"}, - {file = "lazy_object_proxy-1.9.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f5fa4a61ce2438267163891961cfd5e32ec97a2c444e5b842d574251ade27d2"}, - {file = 
"lazy_object_proxy-1.9.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:8fa02eaab317b1e9e03f69aab1f91e120e7899b392c4fc19807a8278a07a97e8"}, - {file = "lazy_object_proxy-1.9.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e7c21c95cae3c05c14aafffe2865bbd5e377cfc1348c4f7751d9dc9a48ca4bda"}, - {file = "lazy_object_proxy-1.9.0-cp37-cp37m-win32.whl", hash = "sha256:f12ad7126ae0c98d601a7ee504c1122bcef553d1d5e0c3bfa77b16b3968d2734"}, - {file = "lazy_object_proxy-1.9.0-cp37-cp37m-win_amd64.whl", hash = "sha256:edd20c5a55acb67c7ed471fa2b5fb66cb17f61430b7a6b9c3b4a1e40293b1671"}, - {file = "lazy_object_proxy-1.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2d0daa332786cf3bb49e10dc6a17a52f6a8f9601b4cf5c295a4f85854d61de63"}, - {file = "lazy_object_proxy-1.9.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cd077f3d04a58e83d04b20e334f678c2b0ff9879b9375ed107d5d07ff160171"}, - {file = "lazy_object_proxy-1.9.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:660c94ea760b3ce47d1855a30984c78327500493d396eac4dfd8bd82041b22be"}, - {file = "lazy_object_proxy-1.9.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:212774e4dfa851e74d393a2370871e174d7ff0ebc980907723bb67d25c8a7c30"}, - {file = "lazy_object_proxy-1.9.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f0117049dd1d5635bbff65444496c90e0baa48ea405125c088e93d9cf4525b11"}, - {file = "lazy_object_proxy-1.9.0-cp38-cp38-win32.whl", hash = "sha256:0a891e4e41b54fd5b8313b96399f8b0e173bbbfc03c7631f01efbe29bb0bcf82"}, - {file = "lazy_object_proxy-1.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:9990d8e71b9f6488e91ad25f322898c136b008d87bf852ff65391b004da5e17b"}, - {file = "lazy_object_proxy-1.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9e7551208b2aded9c1447453ee366f1c4070602b3d932ace044715d89666899b"}, - {file = "lazy_object_proxy-1.9.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:5f83ac4d83ef0ab017683d715ed356e30dd48a93746309c8f3517e1287523ef4"}, - {file = "lazy_object_proxy-1.9.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7322c3d6f1766d4ef1e51a465f47955f1e8123caee67dd641e67d539a534d006"}, - {file = "lazy_object_proxy-1.9.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:18b78ec83edbbeb69efdc0e9c1cb41a3b1b1ed11ddd8ded602464c3fc6020494"}, - {file = "lazy_object_proxy-1.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:09763491ce220c0299688940f8dc2c5d05fd1f45af1e42e636b2e8b2303e4382"}, - {file = "lazy_object_proxy-1.9.0-cp39-cp39-win32.whl", hash = "sha256:9090d8e53235aa280fc9239a86ae3ea8ac58eff66a705fa6aa2ec4968b95c821"}, - {file = "lazy_object_proxy-1.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:db1c1722726f47e10e0b5fdbf15ac3b8adb58c091d12b3ab713965795036985f"}, + {file = "lazy-object-proxy-1.10.0.tar.gz", hash = "sha256:78247b6d45f43a52ef35c25b5581459e85117225408a4128a3daf8bf9648ac69"}, + {file = "lazy_object_proxy-1.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:855e068b0358ab916454464a884779c7ffa312b8925c6f7401e952dcf3b89977"}, + {file = "lazy_object_proxy-1.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab7004cf2e59f7c2e4345604a3e6ea0d92ac44e1c2375527d56492014e690c3"}, + {file = "lazy_object_proxy-1.10.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc0d2fc424e54c70c4bc06787e4072c4f3b1aa2f897dfdc34ce1013cf3ceef05"}, + {file = "lazy_object_proxy-1.10.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e2adb09778797da09d2b5ebdbceebf7dd32e2c96f79da9052b2e87b6ea495895"}, + {file = "lazy_object_proxy-1.10.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b1f711e2c6dcd4edd372cf5dec5c5a30d23bba06ee012093267b3376c079ec83"}, + {file = "lazy_object_proxy-1.10.0-cp310-cp310-win32.whl", hash = 
"sha256:76a095cfe6045c7d0ca77db9934e8f7b71b14645f0094ffcd842349ada5c5fb9"}, + {file = "lazy_object_proxy-1.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:b4f87d4ed9064b2628da63830986c3d2dca7501e6018347798313fcf028e2fd4"}, + {file = "lazy_object_proxy-1.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fec03caabbc6b59ea4a638bee5fce7117be8e99a4103d9d5ad77f15d6f81020c"}, + {file = "lazy_object_proxy-1.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:02c83f957782cbbe8136bee26416686a6ae998c7b6191711a04da776dc9e47d4"}, + {file = "lazy_object_proxy-1.10.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:009e6bb1f1935a62889ddc8541514b6a9e1fcf302667dcb049a0be5c8f613e56"}, + {file = "lazy_object_proxy-1.10.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:75fc59fc450050b1b3c203c35020bc41bd2695ed692a392924c6ce180c6f1dc9"}, + {file = "lazy_object_proxy-1.10.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:782e2c9b2aab1708ffb07d4bf377d12901d7a1d99e5e410d648d892f8967ab1f"}, + {file = "lazy_object_proxy-1.10.0-cp311-cp311-win32.whl", hash = "sha256:edb45bb8278574710e68a6b021599a10ce730d156e5b254941754a9cc0b17d03"}, + {file = "lazy_object_proxy-1.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:e271058822765ad5e3bca7f05f2ace0de58a3f4e62045a8c90a0dfd2f8ad8cc6"}, + {file = "lazy_object_proxy-1.10.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e98c8af98d5707dcdecc9ab0863c0ea6e88545d42ca7c3feffb6b4d1e370c7ba"}, + {file = "lazy_object_proxy-1.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:952c81d415b9b80ea261d2372d2a4a2332a3890c2b83e0535f263ddfe43f0d43"}, + {file = "lazy_object_proxy-1.10.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80b39d3a151309efc8cc48675918891b865bdf742a8616a337cb0090791a0de9"}, + {file = 
"lazy_object_proxy-1.10.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e221060b701e2aa2ea991542900dd13907a5c90fa80e199dbf5a03359019e7a3"}, + {file = "lazy_object_proxy-1.10.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:92f09ff65ecff3108e56526f9e2481b8116c0b9e1425325e13245abfd79bdb1b"}, + {file = "lazy_object_proxy-1.10.0-cp312-cp312-win32.whl", hash = "sha256:3ad54b9ddbe20ae9f7c1b29e52f123120772b06dbb18ec6be9101369d63a4074"}, + {file = "lazy_object_proxy-1.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:127a789c75151db6af398b8972178afe6bda7d6f68730c057fbbc2e96b08d282"}, + {file = "lazy_object_proxy-1.10.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9e4ed0518a14dd26092614412936920ad081a424bdcb54cc13349a8e2c6d106a"}, + {file = "lazy_object_proxy-1.10.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ad9e6ed739285919aa9661a5bbed0aaf410aa60231373c5579c6b4801bd883c"}, + {file = "lazy_object_proxy-1.10.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fc0a92c02fa1ca1e84fc60fa258458e5bf89d90a1ddaeb8ed9cc3147f417255"}, + {file = "lazy_object_proxy-1.10.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0aefc7591920bbd360d57ea03c995cebc204b424524a5bd78406f6e1b8b2a5d8"}, + {file = "lazy_object_proxy-1.10.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5faf03a7d8942bb4476e3b62fd0f4cf94eaf4618e304a19865abf89a35c0bbee"}, + {file = "lazy_object_proxy-1.10.0-cp38-cp38-win32.whl", hash = "sha256:e333e2324307a7b5d86adfa835bb500ee70bfcd1447384a822e96495796b0ca4"}, + {file = "lazy_object_proxy-1.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:cb73507defd385b7705c599a94474b1d5222a508e502553ef94114a143ec6696"}, + {file = "lazy_object_proxy-1.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:366c32fe5355ef5fc8a232c5436f4cc66e9d3e8967c01fb2e6302fd6627e3d94"}, + {file = "lazy_object_proxy-1.10.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", 
hash = "sha256:2297f08f08a2bb0d32a4265e98a006643cd7233fb7983032bd61ac7a02956b3b"}, + {file = "lazy_object_proxy-1.10.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18dd842b49456aaa9a7cf535b04ca4571a302ff72ed8740d06b5adcd41fe0757"}, + {file = "lazy_object_proxy-1.10.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:217138197c170a2a74ca0e05bddcd5f1796c735c37d0eee33e43259b192aa424"}, + {file = "lazy_object_proxy-1.10.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9a3a87cf1e133e5b1994144c12ca4aa3d9698517fe1e2ca82977781b16955658"}, + {file = "lazy_object_proxy-1.10.0-cp39-cp39-win32.whl", hash = "sha256:30b339b2a743c5288405aa79a69e706a06e02958eab31859f7f3c04980853b70"}, + {file = "lazy_object_proxy-1.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:a899b10e17743683b293a729d3a11f2f399e8a90c73b089e29f5d0fe3509f0dd"}, + {file = "lazy_object_proxy-1.10.0-pp310.pp311.pp312.pp38.pp39-none-any.whl", hash = "sha256:80fa48bd89c8f2f456fc0765c11c23bf5af827febacd2f523ca5bc1893fcc09d"}, ] [[package]] name = "lxml" -version = "4.9.3" +version = "5.1.0" description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." 
category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, != 3.4.*" -files = [ - {file = "lxml-4.9.3-cp27-cp27m-macosx_11_0_x86_64.whl", hash = "sha256:b0a545b46b526d418eb91754565ba5b63b1c0b12f9bd2f808c852d9b4b2f9b5c"}, - {file = "lxml-4.9.3-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:075b731ddd9e7f68ad24c635374211376aa05a281673ede86cbe1d1b3455279d"}, - {file = "lxml-4.9.3-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:1e224d5755dba2f4a9498e150c43792392ac9b5380aa1b845f98a1618c94eeef"}, - {file = "lxml-4.9.3-cp27-cp27m-win32.whl", hash = "sha256:2c74524e179f2ad6d2a4f7caf70e2d96639c0954c943ad601a9e146c76408ed7"}, - {file = "lxml-4.9.3-cp27-cp27m-win_amd64.whl", hash = "sha256:4f1026bc732b6a7f96369f7bfe1a4f2290fb34dce00d8644bc3036fb351a4ca1"}, - {file = "lxml-4.9.3-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c0781a98ff5e6586926293e59480b64ddd46282953203c76ae15dbbbf302e8bb"}, - {file = "lxml-4.9.3-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:cef2502e7e8a96fe5ad686d60b49e1ab03e438bd9123987994528febd569868e"}, - {file = "lxml-4.9.3-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:b86164d2cff4d3aaa1f04a14685cbc072efd0b4f99ca5708b2ad1b9b5988a991"}, - {file = "lxml-4.9.3-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:42871176e7896d5d45138f6d28751053c711ed4d48d8e30b498da155af39aebd"}, - {file = "lxml-4.9.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:ae8b9c6deb1e634ba4f1930eb67ef6e6bf6a44b6eb5ad605642b2d6d5ed9ce3c"}, - {file = "lxml-4.9.3-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:411007c0d88188d9f621b11d252cce90c4a2d1a49db6c068e3c16422f306eab8"}, - {file = "lxml-4.9.3-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:cd47b4a0d41d2afa3e58e5bf1f62069255aa2fd6ff5ee41604418ca925911d76"}, - {file = 
"lxml-4.9.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0e2cb47860da1f7e9a5256254b74ae331687b9672dfa780eed355c4c9c3dbd23"}, - {file = "lxml-4.9.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1247694b26342a7bf47c02e513d32225ededd18045264d40758abeb3c838a51f"}, - {file = "lxml-4.9.3-cp310-cp310-win32.whl", hash = "sha256:cdb650fc86227eba20de1a29d4b2c1bfe139dc75a0669270033cb2ea3d391b85"}, - {file = "lxml-4.9.3-cp310-cp310-win_amd64.whl", hash = "sha256:97047f0d25cd4bcae81f9ec9dc290ca3e15927c192df17331b53bebe0e3ff96d"}, - {file = "lxml-4.9.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:1f447ea5429b54f9582d4b955f5f1985f278ce5cf169f72eea8afd9502973dd5"}, - {file = "lxml-4.9.3-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:57d6ba0ca2b0c462f339640d22882acc711de224d769edf29962b09f77129cbf"}, - {file = "lxml-4.9.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:9767e79108424fb6c3edf8f81e6730666a50feb01a328f4a016464a5893f835a"}, - {file = "lxml-4.9.3-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:71c52db65e4b56b8ddc5bb89fb2e66c558ed9d1a74a45ceb7dcb20c191c3df2f"}, - {file = "lxml-4.9.3-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:d73d8ecf8ecf10a3bd007f2192725a34bd62898e8da27eb9d32a58084f93962b"}, - {file = "lxml-4.9.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0a3d3487f07c1d7f150894c238299934a2a074ef590b583103a45002035be120"}, - {file = "lxml-4.9.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9e28c51fa0ce5674be9f560c6761c1b441631901993f76700b1b30ca6c8378d6"}, - {file = "lxml-4.9.3-cp311-cp311-win32.whl", hash = "sha256:0bfd0767c5c1de2551a120673b72e5d4b628737cb05414f03c3277bf9bed3305"}, - {file = "lxml-4.9.3-cp311-cp311-win_amd64.whl", hash = "sha256:25f32acefac14ef7bd53e4218fe93b804ef6f6b92ffdb4322bb6d49d94cad2bc"}, - {file = "lxml-4.9.3-cp312-cp312-macosx_11_0_universal2.whl", hash = 
"sha256:d3ff32724f98fbbbfa9f49d82852b159e9784d6094983d9a8b7f2ddaebb063d4"}, - {file = "lxml-4.9.3-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:48d6ed886b343d11493129e019da91d4039826794a3e3027321c56d9e71505be"}, - {file = "lxml-4.9.3-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:9a92d3faef50658dd2c5470af249985782bf754c4e18e15afb67d3ab06233f13"}, - {file = "lxml-4.9.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b4e4bc18382088514ebde9328da057775055940a1f2e18f6ad2d78aa0f3ec5b9"}, - {file = "lxml-4.9.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fc9b106a1bf918db68619fdcd6d5ad4f972fdd19c01d19bdb6bf63f3589a9ec5"}, - {file = "lxml-4.9.3-cp312-cp312-win_amd64.whl", hash = "sha256:d37017287a7adb6ab77e1c5bee9bcf9660f90ff445042b790402a654d2ad81d8"}, - {file = "lxml-4.9.3-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:56dc1f1ebccc656d1b3ed288f11e27172a01503fc016bcabdcbc0978b19352b7"}, - {file = "lxml-4.9.3-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:578695735c5a3f51569810dfebd05dd6f888147a34f0f98d4bb27e92b76e05c2"}, - {file = "lxml-4.9.3-cp35-cp35m-win32.whl", hash = "sha256:704f61ba8c1283c71b16135caf697557f5ecf3e74d9e453233e4771d68a1f42d"}, - {file = "lxml-4.9.3-cp35-cp35m-win_amd64.whl", hash = "sha256:c41bfca0bd3532d53d16fd34d20806d5c2b1ace22a2f2e4c0008570bf2c58833"}, - {file = "lxml-4.9.3-cp36-cp36m-macosx_11_0_x86_64.whl", hash = "sha256:64f479d719dc9f4c813ad9bb6b28f8390360660b73b2e4beb4cb0ae7104f1c12"}, - {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:dd708cf4ee4408cf46a48b108fb9427bfa00b9b85812a9262b5c668af2533ea5"}, - {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c31c7462abdf8f2ac0577d9f05279727e698f97ecbb02f17939ea99ae8daa98"}, - {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = 
"sha256:e3cd95e10c2610c360154afdc2f1480aea394f4a4f1ea0a5eacce49640c9b190"}, - {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:4930be26af26ac545c3dffb662521d4e6268352866956672231887d18f0eaab2"}, - {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4aec80cde9197340bc353d2768e2a75f5f60bacda2bab72ab1dc499589b3878c"}, - {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:14e019fd83b831b2e61baed40cab76222139926b1fb5ed0e79225bc0cae14584"}, - {file = "lxml-4.9.3-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0c0850c8b02c298d3c7006b23e98249515ac57430e16a166873fc47a5d549287"}, - {file = "lxml-4.9.3-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:aca086dc5f9ef98c512bac8efea4483eb84abbf926eaeedf7b91479feb092458"}, - {file = "lxml-4.9.3-cp36-cp36m-win32.whl", hash = "sha256:50baa9c1c47efcaef189f31e3d00d697c6d4afda5c3cde0302d063492ff9b477"}, - {file = "lxml-4.9.3-cp36-cp36m-win_amd64.whl", hash = "sha256:bef4e656f7d98aaa3486d2627e7d2df1157d7e88e7efd43a65aa5dd4714916cf"}, - {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:46f409a2d60f634fe550f7133ed30ad5321ae2e6630f13657fb9479506b00601"}, - {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:4c28a9144688aef80d6ea666c809b4b0e50010a2aca784c97f5e6bf143d9f129"}, - {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:141f1d1a9b663c679dc524af3ea1773e618907e96075262726c7612c02b149a4"}, - {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:53ace1c1fd5a74ef662f844a0413446c0629d151055340e9893da958a374f70d"}, - {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:17a753023436a18e27dd7769e798ce302963c236bc4114ceee5b25c18c52c693"}, - {file = 
"lxml-4.9.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7d298a1bd60c067ea75d9f684f5f3992c9d6766fadbc0bcedd39750bf344c2f4"}, - {file = "lxml-4.9.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:081d32421db5df44c41b7f08a334a090a545c54ba977e47fd7cc2deece78809a"}, - {file = "lxml-4.9.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:23eed6d7b1a3336ad92d8e39d4bfe09073c31bfe502f20ca5116b2a334f8ec02"}, - {file = "lxml-4.9.3-cp37-cp37m-win32.whl", hash = "sha256:1509dd12b773c02acd154582088820893109f6ca27ef7291b003d0e81666109f"}, - {file = "lxml-4.9.3-cp37-cp37m-win_amd64.whl", hash = "sha256:120fa9349a24c7043854c53cae8cec227e1f79195a7493e09e0c12e29f918e52"}, - {file = "lxml-4.9.3-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:4d2d1edbca80b510443f51afd8496be95529db04a509bc8faee49c7b0fb6d2cc"}, - {file = "lxml-4.9.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:8d7e43bd40f65f7d97ad8ef5c9b1778943d02f04febef12def25f7583d19baac"}, - {file = "lxml-4.9.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:71d66ee82e7417828af6ecd7db817913cb0cf9d4e61aa0ac1fde0583d84358db"}, - {file = "lxml-4.9.3-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:6fc3c450eaa0b56f815c7b62f2b7fba7266c4779adcf1cece9e6deb1de7305ce"}, - {file = "lxml-4.9.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:65299ea57d82fb91c7f019300d24050c4ddeb7c5a190e076b5f48a2b43d19c42"}, - {file = "lxml-4.9.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:eadfbbbfb41b44034a4c757fd5d70baccd43296fb894dba0295606a7cf3124aa"}, - {file = "lxml-4.9.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3e9bdd30efde2b9ccfa9cb5768ba04fe71b018a25ea093379c857c9dad262c40"}, - {file = "lxml-4.9.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fcdd00edfd0a3001e0181eab3e63bd5c74ad3e67152c84f93f13769a40e073a7"}, - 
{file = "lxml-4.9.3-cp38-cp38-win32.whl", hash = "sha256:57aba1bbdf450b726d58b2aea5fe47c7875f5afb2c4a23784ed78f19a0462574"}, - {file = "lxml-4.9.3-cp38-cp38-win_amd64.whl", hash = "sha256:92af161ecbdb2883c4593d5ed4815ea71b31fafd7fd05789b23100d081ecac96"}, - {file = "lxml-4.9.3-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:9bb6ad405121241e99a86efff22d3ef469024ce22875a7ae045896ad23ba2340"}, - {file = "lxml-4.9.3-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:8ed74706b26ad100433da4b9d807eae371efaa266ffc3e9191ea436087a9d6a7"}, - {file = "lxml-4.9.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:fbf521479bcac1e25a663df882c46a641a9bff6b56dc8b0fafaebd2f66fb231b"}, - {file = "lxml-4.9.3-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:303bf1edce6ced16bf67a18a1cf8339d0db79577eec5d9a6d4a80f0fb10aa2da"}, - {file = "lxml-4.9.3-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:5515edd2a6d1a5a70bfcdee23b42ec33425e405c5b351478ab7dc9347228f96e"}, - {file = "lxml-4.9.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:690dafd0b187ed38583a648076865d8c229661ed20e48f2335d68e2cf7dc829d"}, - {file = "lxml-4.9.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:b6420a005548ad52154c8ceab4a1290ff78d757f9e5cbc68f8c77089acd3c432"}, - {file = "lxml-4.9.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bb3bb49c7a6ad9d981d734ef7c7193bc349ac338776a0360cc671eaee89bcf69"}, - {file = "lxml-4.9.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d27be7405547d1f958b60837dc4c1007da90b8b23f54ba1f8b728c78fdb19d50"}, - {file = "lxml-4.9.3-cp39-cp39-win32.whl", hash = "sha256:8df133a2ea5e74eef5e8fc6f19b9e085f758768a16e9877a60aec455ed2609b2"}, - {file = "lxml-4.9.3-cp39-cp39-win_amd64.whl", hash = "sha256:4dd9a263e845a72eacb60d12401e37c616438ea2e5442885f65082c276dfb2b2"}, - {file = "lxml-4.9.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = 
"sha256:6689a3d7fd13dc687e9102a27e98ef33730ac4fe37795d5036d18b4d527abd35"}, - {file = "lxml-4.9.3-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:f6bdac493b949141b733c5345b6ba8f87a226029cbabc7e9e121a413e49441e0"}, - {file = "lxml-4.9.3-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:05186a0f1346ae12553d66df1cfce6f251589fea3ad3da4f3ef4e34b2d58c6a3"}, - {file = "lxml-4.9.3-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c2006f5c8d28dee289f7020f721354362fa304acbaaf9745751ac4006650254b"}, - {file = "lxml-4.9.3-pp38-pypy38_pp73-macosx_11_0_x86_64.whl", hash = "sha256:5c245b783db29c4e4fbbbfc9c5a78be496c9fea25517f90606aa1f6b2b3d5f7b"}, - {file = "lxml-4.9.3-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:4fb960a632a49f2f089d522f70496640fdf1218f1243889da3822e0a9f5f3ba7"}, - {file = "lxml-4.9.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:50670615eaf97227d5dc60de2dc99fb134a7130d310d783314e7724bf163f75d"}, - {file = "lxml-4.9.3-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:9719fe17307a9e814580af1f5c6e05ca593b12fb7e44fe62450a5384dbf61b4b"}, - {file = "lxml-4.9.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:3331bece23c9ee066e0fb3f96c61322b9e0f54d775fccefff4c38ca488de283a"}, - {file = "lxml-4.9.3-pp39-pypy39_pp73-macosx_11_0_x86_64.whl", hash = "sha256:ed667f49b11360951e201453fc3967344d0d0263aa415e1619e85ae7fd17b4e0"}, - {file = "lxml-4.9.3-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:8b77946fd508cbf0fccd8e400a7f71d4ac0e1595812e66025bac475a8e811694"}, - {file = "lxml-4.9.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:e4da8ca0c0c0aea88fd46be8e44bd49716772358d648cce45fe387f7b92374a7"}, - {file = 
"lxml-4.9.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:fe4bda6bd4340caa6e5cf95e73f8fea5c4bfc55763dd42f1b50a94c1b4a2fbd4"}, - {file = "lxml-4.9.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:f3df3db1d336b9356dd3112eae5f5c2b8b377f3bc826848567f10bfddfee77e9"}, - {file = "lxml-4.9.3.tar.gz", hash = "sha256:48628bd53a426c9eb9bc066a923acaa0878d1e86129fd5359aee99285f4eed9c"}, +python-versions = ">=3.6" +files = [ + {file = "lxml-5.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:704f5572ff473a5f897745abebc6df40f22d4133c1e0a1f124e4f2bd3330ff7e"}, + {file = "lxml-5.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9d3c0f8567ffe7502d969c2c1b809892dc793b5d0665f602aad19895f8d508da"}, + {file = "lxml-5.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5fcfbebdb0c5d8d18b84118842f31965d59ee3e66996ac842e21f957eb76138c"}, + {file = "lxml-5.1.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2f37c6d7106a9d6f0708d4e164b707037b7380fcd0b04c5bd9cae1fb46a856fb"}, + {file = "lxml-5.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2befa20a13f1a75c751f47e00929fb3433d67eb9923c2c0b364de449121f447c"}, + {file = "lxml-5.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22b7ee4c35f374e2c20337a95502057964d7e35b996b1c667b5c65c567d2252a"}, + {file = "lxml-5.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:bf8443781533b8d37b295016a4b53c1494fa9a03573c09ca5104550c138d5c05"}, + {file = "lxml-5.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:82bddf0e72cb2af3cbba7cec1d2fd11fda0de6be8f4492223d4a268713ef2147"}, + {file = "lxml-5.1.0-cp310-cp310-win32.whl", hash = "sha256:b66aa6357b265670bb574f050ffceefb98549c721cf28351b748be1ef9577d93"}, + {file = "lxml-5.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:4946e7f59b7b6a9e27bef34422f645e9a368cb2be11bf1ef3cafc39a1f6ba68d"}, + {file = 
"lxml-5.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:14deca1460b4b0f6b01f1ddc9557704e8b365f55c63070463f6c18619ebf964f"}, + {file = "lxml-5.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ed8c3d2cd329bf779b7ed38db176738f3f8be637bb395ce9629fc76f78afe3d4"}, + {file = "lxml-5.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:436a943c2900bb98123b06437cdd30580a61340fbdb7b28aaf345a459c19046a"}, + {file = "lxml-5.1.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:acb6b2f96f60f70e7f34efe0c3ea34ca63f19ca63ce90019c6cbca6b676e81fa"}, + {file = "lxml-5.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:af8920ce4a55ff41167ddbc20077f5698c2e710ad3353d32a07d3264f3a2021e"}, + {file = "lxml-5.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7cfced4a069003d8913408e10ca8ed092c49a7f6cefee9bb74b6b3e860683b45"}, + {file = "lxml-5.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9e5ac3437746189a9b4121db2a7b86056ac8786b12e88838696899328fc44bb2"}, + {file = "lxml-5.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f4c9bda132ad108b387c33fabfea47866af87f4ea6ffb79418004f0521e63204"}, + {file = "lxml-5.1.0-cp311-cp311-win32.whl", hash = "sha256:bc64d1b1dab08f679fb89c368f4c05693f58a9faf744c4d390d7ed1d8223869b"}, + {file = "lxml-5.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:a5ab722ae5a873d8dcee1f5f45ddd93c34210aed44ff2dc643b5025981908cda"}, + {file = "lxml-5.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:9aa543980ab1fbf1720969af1d99095a548ea42e00361e727c58a40832439114"}, + {file = "lxml-5.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6f11b77ec0979f7e4dc5ae081325a2946f1fe424148d3945f943ceaede98adb8"}, + {file = "lxml-5.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a36c506e5f8aeb40680491d39ed94670487ce6614b9d27cabe45d94cd5d63e1e"}, + {file = 
"lxml-5.1.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f643ffd2669ffd4b5a3e9b41c909b72b2a1d5e4915da90a77e119b8d48ce867a"}, + {file = "lxml-5.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:16dd953fb719f0ffc5bc067428fc9e88f599e15723a85618c45847c96f11f431"}, + {file = "lxml-5.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16018f7099245157564d7148165132c70adb272fb5a17c048ba70d9cc542a1a1"}, + {file = "lxml-5.1.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:82cd34f1081ae4ea2ede3d52f71b7be313756e99b4b5f829f89b12da552d3aa3"}, + {file = "lxml-5.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:19a1bc898ae9f06bccb7c3e1dfd73897ecbbd2c96afe9095a6026016e5ca97b8"}, + {file = "lxml-5.1.0-cp312-cp312-win32.whl", hash = "sha256:13521a321a25c641b9ea127ef478b580b5ec82aa2e9fc076c86169d161798b01"}, + {file = "lxml-5.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:1ad17c20e3666c035db502c78b86e58ff6b5991906e55bdbef94977700c72623"}, + {file = "lxml-5.1.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:24ef5a4631c0b6cceaf2dbca21687e29725b7c4e171f33a8f8ce23c12558ded1"}, + {file = "lxml-5.1.0-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8d2900b7f5318bc7ad8631d3d40190b95ef2aa8cc59473b73b294e4a55e9f30f"}, + {file = "lxml-5.1.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:601f4a75797d7a770daed8b42b97cd1bb1ba18bd51a9382077a6a247a12aa38d"}, + {file = "lxml-5.1.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4b68c961b5cc402cbd99cca5eb2547e46ce77260eb705f4d117fd9c3f932b95"}, + {file = "lxml-5.1.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:afd825e30f8d1f521713a5669b63657bcfe5980a916c95855060048b88e1adb7"}, + {file = "lxml-5.1.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = 
"sha256:262bc5f512a66b527d026518507e78c2f9c2bd9eb5c8aeeb9f0eb43fcb69dc67"}, + {file = "lxml-5.1.0-cp36-cp36m-win32.whl", hash = "sha256:e856c1c7255c739434489ec9c8aa9cdf5179785d10ff20add308b5d673bed5cd"}, + {file = "lxml-5.1.0-cp36-cp36m-win_amd64.whl", hash = "sha256:c7257171bb8d4432fe9d6fdde4d55fdbe663a63636a17f7f9aaba9bcb3153ad7"}, + {file = "lxml-5.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b9e240ae0ba96477682aa87899d94ddec1cc7926f9df29b1dd57b39e797d5ab5"}, + {file = "lxml-5.1.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a96f02ba1bcd330807fc060ed91d1f7a20853da6dd449e5da4b09bfcc08fdcf5"}, + {file = "lxml-5.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e3898ae2b58eeafedfe99e542a17859017d72d7f6a63de0f04f99c2cb125936"}, + {file = "lxml-5.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61c5a7edbd7c695e54fca029ceb351fc45cd8860119a0f83e48be44e1c464862"}, + {file = "lxml-5.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:3aeca824b38ca78d9ee2ab82bd9883083d0492d9d17df065ba3b94e88e4d7ee6"}, + {file = "lxml-5.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8f52fe6859b9db71ee609b0c0a70fea5f1e71c3462ecf144ca800d3f434f0764"}, + {file = "lxml-5.1.0-cp37-cp37m-win32.whl", hash = "sha256:d42e3a3fc18acc88b838efded0e6ec3edf3e328a58c68fbd36a7263a874906c8"}, + {file = "lxml-5.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:eac68f96539b32fce2c9b47eb7c25bb2582bdaf1bbb360d25f564ee9e04c542b"}, + {file = "lxml-5.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ae15347a88cf8af0949a9872b57a320d2605ae069bcdf047677318bc0bba45b1"}, + {file = "lxml-5.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c26aab6ea9c54d3bed716b8851c8bfc40cb249b8e9880e250d1eddde9f709bf5"}, + {file = "lxml-5.1.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:342e95bddec3a698ac24378d61996b3ee5ba9acfeb253986002ac53c9a5f6f84"}, + {file = "lxml-5.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:725e171e0b99a66ec8605ac77fa12239dbe061482ac854d25720e2294652eeaa"}, + {file = "lxml-5.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d184e0d5c918cff04cdde9dbdf9600e960161d773666958c9d7b565ccc60c45"}, + {file = "lxml-5.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:98f3f020a2b736566c707c8e034945c02aa94e124c24f77ca097c446f81b01f1"}, + {file = "lxml-5.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6d48fc57e7c1e3df57be5ae8614bab6d4e7b60f65c5457915c26892c41afc59e"}, + {file = "lxml-5.1.0-cp38-cp38-win32.whl", hash = "sha256:7ec465e6549ed97e9f1e5ed51c657c9ede767bc1c11552f7f4d022c4df4a977a"}, + {file = "lxml-5.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:b21b4031b53d25b0858d4e124f2f9131ffc1530431c6d1321805c90da78388d1"}, + {file = "lxml-5.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:52427a7eadc98f9e62cb1368a5079ae826f94f05755d2d567d93ee1bc3ceb354"}, + {file = "lxml-5.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6a2a2c724d97c1eb8cf966b16ca2915566a4904b9aad2ed9a09c748ffe14f969"}, + {file = "lxml-5.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:843b9c835580d52828d8f69ea4302537337a21e6b4f1ec711a52241ba4a824f3"}, + {file = "lxml-5.1.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9b99f564659cfa704a2dd82d0684207b1aadf7d02d33e54845f9fc78e06b7581"}, + {file = "lxml-5.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f8b0c78e7aac24979ef09b7f50da871c2de2def043d468c4b41f512d831e912"}, + {file = "lxml-5.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9bcf86dfc8ff3e992fed847c077bd875d9e0ba2fa25d859c3a0f0f76f07f0c8d"}, + {file = "lxml-5.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:49a9b4af45e8b925e1cd6f3b15bbba2c81e7dba6dce170c677c9cda547411e14"}, + {file = "lxml-5.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:280f3edf15c2a967d923bcfb1f8f15337ad36f93525828b40a0f9d6c2ad24890"}, + {file = "lxml-5.1.0-cp39-cp39-win32.whl", hash = "sha256:ed7326563024b6e91fef6b6c7a1a2ff0a71b97793ac33dbbcf38f6005e51ff6e"}, + {file = "lxml-5.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:8d7b4beebb178e9183138f552238f7e6613162a42164233e2bda00cb3afac58f"}, + {file = "lxml-5.1.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9bd0ae7cc2b85320abd5e0abad5ccee5564ed5f0cc90245d2f9a8ef330a8deae"}, + {file = "lxml-5.1.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8c1d679df4361408b628f42b26a5d62bd3e9ba7f0c0e7969f925021554755aa"}, + {file = "lxml-5.1.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:2ad3a8ce9e8a767131061a22cd28fdffa3cd2dc193f399ff7b81777f3520e372"}, + {file = "lxml-5.1.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:304128394c9c22b6569eba2a6d98392b56fbdfbad58f83ea702530be80d0f9df"}, + {file = "lxml-5.1.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d74fcaf87132ffc0447b3c685a9f862ffb5b43e70ea6beec2fb8057d5d2a1fea"}, + {file = "lxml-5.1.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:8cf5877f7ed384dabfdcc37922c3191bf27e55b498fecece9fd5c2c7aaa34c33"}, + {file = "lxml-5.1.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:877efb968c3d7eb2dad540b6cabf2f1d3c0fbf4b2d309a3c141f79c7e0061324"}, + {file = "lxml-5.1.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f14a4fb1c1c402a22e6a341a24c1341b4a3def81b41cd354386dcb795f83897"}, + {file = "lxml-5.1.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:25663d6e99659544ee8fe1b89b1a8c0aaa5e34b103fab124b17fa958c4a324a6"}, + {file = "lxml-5.1.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:8b9f19df998761babaa7f09e6bc169294eefafd6149aaa272081cbddc7ba4ca3"}, + 
{file = "lxml-5.1.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e53d7e6a98b64fe54775d23a7c669763451340c3d44ad5e3a3b48a1efbdc96f"}, + {file = "lxml-5.1.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:c3cd1fc1dc7c376c54440aeaaa0dcc803d2126732ff5c6b68ccd619f2e64be4f"}, + {file = "lxml-5.1.0.tar.gz", hash = "sha256:3eea6ed6e6c918e468e693c41ef07f3c3acc310b70ddd9cc72d9ef84bc9564ca"}, ] [package.extras] cssselect = ["cssselect (>=0.7)"] html5 = ["html5lib"] htmlsoup = ["BeautifulSoup4"] -source = ["Cython (>=0.29.35)"] +source = ["Cython (>=3.0.7)"] [[package]] name = "markdown" @@ -1731,62 +1844,72 @@ testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] [[package]] name = "markupsafe" -version = "2.1.3" +version = "2.1.4" description = "Safely add untrusted strings to HTML/XML markup." category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_i686.whl", hash = 
"sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-win32.whl", hash = "sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-win_amd64.whl", hash = "sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = 
"sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-win32.whl", hash = "sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-win_amd64.whl", hash = "sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee"}, - {file = 
"MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-win32.whl", hash = "sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-win_amd64.whl", hash = "sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-win32.whl", hash = "sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-win_amd64.whl", hash = "sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba"}, - {file = "MarkupSafe-2.1.3.tar.gz", hash = "sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad"}, + {file = "MarkupSafe-2.1.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:de8153a7aae3835484ac168a9a9bdaa0c5eee4e0bc595503c95d53b942879c84"}, + {file = "MarkupSafe-2.1.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e888ff76ceb39601c59e219f281466c6d7e66bd375b4ec1ce83bcdc68306796b"}, + {file = "MarkupSafe-2.1.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0b838c37ba596fcbfca71651a104a611543077156cb0a26fe0c475e1f152ee8"}, + {file = "MarkupSafe-2.1.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dac1ebf6983148b45b5fa48593950f90ed6d1d26300604f321c74a9ca1609f8e"}, + {file = "MarkupSafe-2.1.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0fbad3d346df8f9d72622ac71b69565e621ada2ce6572f37c2eae8dacd60385d"}, + {file = "MarkupSafe-2.1.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d5291d98cd3ad9a562883468c690a2a238c4a6388ab3bd155b0c75dd55ece858"}, + {file = "MarkupSafe-2.1.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a7cc49ef48a3c7a0005a949f3c04f8baa5409d3f663a1b36f0eba9bfe2a0396e"}, + {file = "MarkupSafe-2.1.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:b83041cda633871572f0d3c41dddd5582ad7d22f65a72eacd8d3d6d00291df26"}, + {file = "MarkupSafe-2.1.4-cp310-cp310-win32.whl", hash = "sha256:0c26f67b3fe27302d3a412b85ef696792c4a2386293c53ba683a89562f9399b0"}, + {file = "MarkupSafe-2.1.4-cp310-cp310-win_amd64.whl", hash = "sha256:a76055d5cb1c23485d7ddae533229039b850db711c554a12ea64a0fd8a0129e2"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9e9e3c4020aa2dc62d5dd6743a69e399ce3de58320522948af6140ac959ab863"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0042d6a9880b38e1dd9ff83146cc3c9c18a059b9360ceae207805567aacccc69"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55d03fea4c4e9fd0ad75dc2e7e2b6757b80c152c032ea1d1de487461d8140efc"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ab3a886a237f6e9c9f4f7d272067e712cdb4efa774bef494dccad08f39d8ae6"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abf5ebbec056817057bfafc0445916bb688a255a5146f900445d081db08cbabb"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e1a0d1924a5013d4f294087e00024ad25668234569289650929ab871231668e7"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e7902211afd0af05fbadcc9a312e4cf10f27b779cf1323e78d52377ae4b72bea"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c669391319973e49a7c6230c218a1e3044710bc1ce4c8e6eb71f7e6d43a2c131"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-win32.whl", hash = "sha256:31f57d64c336b8ccb1966d156932f3daa4fee74176b0fdc48ef580be774aae74"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-win_amd64.whl", hash = "sha256:54a7e1380dfece8847c71bf7e33da5d084e9b889c75eca19100ef98027bd9f56"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-macosx_10_9_universal2.whl", hash = 
"sha256:a76cd37d229fc385738bd1ce4cba2a121cf26b53864c1772694ad0ad348e509e"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:987d13fe1d23e12a66ca2073b8d2e2a75cec2ecb8eab43ff5624ba0ad42764bc"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5244324676254697fe5c181fc762284e2c5fceeb1c4e3e7f6aca2b6f107e60dc"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78bc995e004681246e85e28e068111a4c3f35f34e6c62da1471e844ee1446250"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a4d176cfdfde84f732c4a53109b293d05883e952bbba68b857ae446fa3119b4f"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:f9917691f410a2e0897d1ef99619fd3f7dd503647c8ff2475bf90c3cf222ad74"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:f06e5a9e99b7df44640767842f414ed5d7bedaaa78cd817ce04bbd6fd86e2dd6"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:396549cea79e8ca4ba65525470d534e8a41070e6b3500ce2414921099cb73e8d"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-win32.whl", hash = "sha256:f6be2d708a9d0e9b0054856f07ac7070fbe1754be40ca8525d5adccdbda8f475"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-win_amd64.whl", hash = "sha256:5045e892cfdaecc5b4c01822f353cf2c8feb88a6ec1c0adef2a2e705eef0f656"}, + {file = "MarkupSafe-2.1.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7a07f40ef8f0fbc5ef1000d0c78771f4d5ca03b4953fc162749772916b298fc4"}, + {file = "MarkupSafe-2.1.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d18b66fe626ac412d96c2ab536306c736c66cf2a31c243a45025156cc190dc8a"}, + {file = "MarkupSafe-2.1.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:698e84142f3f884114ea8cf83e7a67ca8f4ace8454e78fe960646c6c91c63bfa"}, + {file = 
"MarkupSafe-2.1.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:49a3b78a5af63ec10d8604180380c13dcd870aba7928c1fe04e881d5c792dc4e"}, + {file = "MarkupSafe-2.1.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:15866d7f2dc60cfdde12ebb4e75e41be862348b4728300c36cdf405e258415ec"}, + {file = "MarkupSafe-2.1.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:6aa5e2e7fc9bc042ae82d8b79d795b9a62bd8f15ba1e7594e3db243f158b5565"}, + {file = "MarkupSafe-2.1.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:54635102ba3cf5da26eb6f96c4b8c53af8a9c0d97b64bdcb592596a6255d8518"}, + {file = "MarkupSafe-2.1.4-cp37-cp37m-win32.whl", hash = "sha256:3583a3a3ab7958e354dc1d25be74aee6228938312ee875a22330c4dc2e41beb0"}, + {file = "MarkupSafe-2.1.4-cp37-cp37m-win_amd64.whl", hash = "sha256:d6e427c7378c7f1b2bef6a344c925b8b63623d3321c09a237b7cc0e77dd98ceb"}, + {file = "MarkupSafe-2.1.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:bf1196dcc239e608605b716e7b166eb5faf4bc192f8a44b81e85251e62584bd2"}, + {file = "MarkupSafe-2.1.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4df98d4a9cd6a88d6a585852f56f2155c9cdb6aec78361a19f938810aa020954"}, + {file = "MarkupSafe-2.1.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b835aba863195269ea358cecc21b400276747cc977492319fd7682b8cd2c253d"}, + {file = "MarkupSafe-2.1.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23984d1bdae01bee794267424af55eef4dfc038dc5d1272860669b2aa025c9e3"}, + {file = "MarkupSafe-2.1.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c98c33ffe20e9a489145d97070a435ea0679fddaabcafe19982fe9c971987d5"}, + {file = "MarkupSafe-2.1.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9896fca4a8eb246defc8b2a7ac77ef7553b638e04fbf170bff78a40fa8a91474"}, + {file = "MarkupSafe-2.1.4-cp38-cp38-musllinux_1_1_i686.whl", hash = 
"sha256:b0fe73bac2fed83839dbdbe6da84ae2a31c11cfc1c777a40dbd8ac8a6ed1560f"}, + {file = "MarkupSafe-2.1.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c7556bafeaa0a50e2fe7dc86e0382dea349ebcad8f010d5a7dc6ba568eaaa789"}, + {file = "MarkupSafe-2.1.4-cp38-cp38-win32.whl", hash = "sha256:fc1a75aa8f11b87910ffd98de62b29d6520b6d6e8a3de69a70ca34dea85d2a8a"}, + {file = "MarkupSafe-2.1.4-cp38-cp38-win_amd64.whl", hash = "sha256:3a66c36a3864df95e4f62f9167c734b3b1192cb0851b43d7cc08040c074c6279"}, + {file = "MarkupSafe-2.1.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:765f036a3d00395a326df2835d8f86b637dbaf9832f90f5d196c3b8a7a5080cb"}, + {file = "MarkupSafe-2.1.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:21e7af8091007bf4bebf4521184f4880a6acab8df0df52ef9e513d8e5db23411"}, + {file = "MarkupSafe-2.1.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5c31fe855c77cad679b302aabc42d724ed87c043b1432d457f4976add1c2c3e"}, + {file = "MarkupSafe-2.1.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7653fa39578957bc42e5ebc15cf4361d9e0ee4b702d7d5ec96cdac860953c5b4"}, + {file = "MarkupSafe-2.1.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:47bb5f0142b8b64ed1399b6b60f700a580335c8e1c57f2f15587bd072012decc"}, + {file = "MarkupSafe-2.1.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:fe8512ed897d5daf089e5bd010c3dc03bb1bdae00b35588c49b98268d4a01e00"}, + {file = "MarkupSafe-2.1.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:36d7626a8cca4d34216875aee5a1d3d654bb3dac201c1c003d182283e3205949"}, + {file = "MarkupSafe-2.1.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b6f14a9cd50c3cb100eb94b3273131c80d102e19bb20253ac7bd7336118a673a"}, + {file = "MarkupSafe-2.1.4-cp39-cp39-win32.whl", hash = "sha256:c8f253a84dbd2c63c19590fa86a032ef3d8cc18923b8049d91bcdeeb2581fbf6"}, + {file = "MarkupSafe-2.1.4-cp39-cp39-win_amd64.whl", hash = 
"sha256:8b570a1537367b52396e53325769608f2a687ec9a4363647af1cded8928af959"}, + {file = "MarkupSafe-2.1.4.tar.gz", hash = "sha256:3aae9af4cac263007fd6309c64c6ab4506dd2b79382d9d19a1994f9240b8db4f"}, ] [[package]] @@ -1898,14 +2021,14 @@ requests = ">=2.26" [[package]] name = "mkdocs-material-extensions" -version = "1.3" +version = "1.3.1" description = "Extension pack for Python Markdown and MkDocs Material." category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "mkdocs_material_extensions-1.3-py3-none-any.whl", hash = "sha256:0297cc48ba68a9fdd1ef3780a3b41b534b0d0df1d1181a44676fda5f464eeadc"}, - {file = "mkdocs_material_extensions-1.3.tar.gz", hash = "sha256:f0446091503acb110a7cab9349cbc90eeac51b58d1caa92a704a81ca1e24ddbd"}, + {file = "mkdocs_material_extensions-1.3.1-py3-none-any.whl", hash = "sha256:adff8b62700b25cb77b53358dad940f3ef973dd6db797907c49e3c2ef3ab4e31"}, + {file = "mkdocs_material_extensions-1.3.1.tar.gz", hash = "sha256:10c9511cea88f568257f960358a467d12b970e1f7b2c0e5fb2bb48cab1928443"}, ] [[package]] @@ -1967,7 +2090,7 @@ mkdocstrings = ">=0.20" name = "mypy-extensions" version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." -category = "dev" +category = "main" optional = false python-versions = ">=3.5" files = [ @@ -2010,14 +2133,14 @@ typing-extensions = ">=4.3.0" [[package]] name = "nautobot" -version = "2.0.4" +version = "2.1.1" description = "Source of truth and network automation platform." 
category = "main" optional = false python-versions = ">=3.8,<3.12" files = [ - {file = "nautobot-2.0.4-py3-none-any.whl", hash = "sha256:78687b72b90b5d7a6e70399bb16183eca2e0c8f25a574ae1b5d6a6ed3b7c01c8"}, - {file = "nautobot-2.0.4.tar.gz", hash = "sha256:3589c83a1563ba4a6553862f8ae5a56ef0efbf4e6574b429a871178eb8f07512"}, + {file = "nautobot-2.1.1-py3-none-any.whl", hash = "sha256:0b1592274bdb89b767266ec6b6837e67c2b82e2d7bf02308d6e2f877cf839731"}, + {file = "nautobot-2.1.1.tar.gz", hash = "sha256:62df1aa1a972396973df002b51f10dac7c76feeb52387fb94c1d10e41a2aa3e5"}, ] [package.dependencies] @@ -2038,7 +2161,7 @@ django-redis = ">=5.3.0,<5.4.0" django-tables2 = ">=2.6.0,<2.7.0" django-taggit = ">=4.0.0,<4.1.0" django-timezone-field = ">=5.1,<5.2" -django-tree-queries = ">=0.15.0,<0.16.0" +django-tree-queries = ">=0.16.1,<0.17.0" django-webserver = ">=1.2.0,<1.3.0" djangorestframework = ">=3.14.0,<3.15.0" drf-react-template-framework = ">=0.0.17,<0.0.18" @@ -2056,7 +2179,7 @@ netutils = ">=1.6.0,<2.0.0" packaging = ">=23.1,<23.2" Pillow = ">=10.0.0,<10.1.0" prometheus-client = ">=0.17.1,<0.18.0" -psycopg2-binary = ">=2.9.6,<2.10.0" +psycopg2-binary = ">=2.9.9,<2.10.0" python-slugify = ">=8.0.1,<8.1.0" pyuwsgi = ">=2.0.21,<2.1.0" PyYAML = ">=6.0,<6.1" @@ -2071,15 +2194,65 @@ napalm = ["napalm (>=4.1.0,<4.2.0)"] remote-storage = ["django-storages (>=1.13.2,<1.14.0)"] sso = ["social-auth-core[openidconnect,saml] (>=4.4.2,<4.5.0)"] +[[package]] +name = "nautobot-plugin-nornir" +version = "2.0.0" +description = "Nautobot Nornir plugin." 
+category = "main" +optional = false +python-versions = ">=3.8,<3.12" +files = [ + {file = "nautobot_plugin_nornir-2.0.0-py3-none-any.whl", hash = "sha256:9789fa5b0ba342687f8692a29ad28b1194c02506fb3ce3d778cf245a492987b0"}, + {file = "nautobot_plugin_nornir-2.0.0.tar.gz", hash = "sha256:24d663868e5f96e13f7caf2033c71acb1296715a9fb84e1aff41742fa583b8ef"}, +] + +[package.dependencies] +netutils = ">=1.6.0" +nornir-nautobot = ">=3.0.0,<4.0.0" + +[package.extras] +nautobot = ["nautobot (>=2.0.0,<3.0.0)"] + +[[package]] +name = "nautobot-ssot" +version = "2.2.0" +description = "Nautobot Single Source of Truth" +category = "main" +optional = false +python-versions = ">=3.8,<3.12" +files = [ + {file = "nautobot_ssot-2.2.0-py3-none-any.whl", hash = "sha256:0abd1139f45c438a9298d341a2a1792cbfbc4381628f16cc9eedf03beb1c063d"}, + {file = "nautobot_ssot-2.2.0.tar.gz", hash = "sha256:2ca0871737d586bcc660e6857e4f446f1d1a7859c13a395570b59288ae4be2c4"}, +] + +[package.dependencies] +diffsync = ">=1.6.0,<2.0.0" +drf-spectacular = "0.26.3" +Markdown = "!=3.3.5" +nautobot = ">=2.0.0,<3.0.0" +packaging = ">=21.3,<24" +prometheus-client = ">=0.17.1,<0.18.0" + +[package.extras] +aci = ["PyYAML (>=6)"] +all = ["Jinja2 (>=2.11.3)", "PyYAML (>=6)", "cloudvision (>=1.9.0,<2.0.0)", "cvprac (>=1.2.2,<2.0.0)", "dnspython (>=2.1.0,<3.0.0)", "ijson (>=2.5.1)", "ipfabric (>=6.0.9,<6.1.0)", "ipfabric-diagrams (>=6.0.2,<6.1.0)", "nautobot-device-lifecycle-mgmt (>=2.0.0,<3.0.0)", "netutils (>=1.0.0,<2.0.0)", "oauthlib (>=3.1.0)", "python-magic (>=0.4.15)", "pytz (>=2019.3)", "requests (>=2.21.0)", "requests-oauthlib (>=1.3.0)", "six (>=1.13.0)"] +aristacv = ["cloudvision (>=1.9.0,<2.0.0)", "cvprac (>=1.2.2,<2.0.0)"] +device42 = ["requests (>=2.21.0)"] +infoblox = ["dnspython (>=2.1.0,<3.0.0)"] +ipfabric = ["httpx (>=0.23.3)", "ipfabric (>=6.0.9,<6.1.0)", "ipfabric-diagrams (>=6.0.2,<6.1.0)", "netutils (>=1.0.0,<2.0.0)"] +nautobot-device-lifecycle-mgmt = ["nautobot-device-lifecycle-mgmt 
(>=2.0.0,<3.0.0)"] +pysnow = ["ijson (>=2.5.1)", "oauthlib (>=3.1.0)", "python-magic (>=0.4.15)", "pytz (>=2019.3)", "requests (>=2.21.0)", "requests-oauthlib (>=1.3.0)", "six (>=1.13.0)"] +servicenow = ["Jinja2 (>=2.11.3)", "PyYAML (>=6)", "ijson (>=2.5.1)", "oauthlib (>=3.1.0)", "python-magic (>=0.4.15)", "pytz (>=2019.3)", "requests (>=2.21.0)", "requests-oauthlib (>=1.3.0)", "six (>=1.13.0)"] + [[package]] name = "ncclient" -version = "0.6.13" +version = "0.6.15" description = "Python library for NETCONF clients" category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ - {file = "ncclient-0.6.13.tar.gz", hash = "sha256:f9f8cea8bcbe057e1b948b9cd1b241eafb8a3f73c4981fbdfa1cc6ed69c0a7b3"}, + {file = "ncclient-0.6.15.tar.gz", hash = "sha256:6757cb41bc9160dfe47f22f5de8cf2f1adf22f27463fb50453cc415ab96773d8"}, ] [package.dependencies] @@ -2135,16 +2308,122 @@ files = [ [package.extras] optionals = ["jsonschema (>=4.17.3,<5.0.0)", "napalm (>=4.0.0,<5.0.0)"] +[[package]] +name = "nornir" +version = "3.4.1" +description = "Pluggable multi-threaded framework with inventory management to help operate collections of devices" +category = "main" +optional = false +python-versions = ">=3.8,<4.0" +files = [ + {file = "nornir-3.4.1-py3-none-any.whl", hash = "sha256:db079cb95e3baf855530f4f40cb6ee93f93e1bf3cb74ac08180546adb1b987b8"}, + {file = "nornir-3.4.1.tar.gz", hash = "sha256:82a90a3478a3890bef8ad51b256fa966e6e4ca326cbe20a230918ef907cf68c3"}, +] + +[package.dependencies] +importlib-metadata = {version = ">=4,<5", markers = "python_version < \"3.10\""} +mypy_extensions = ">=1.0.0,<2.0.0" +"ruamel.yaml" = ">=0.17" + +[[package]] +name = "nornir-jinja2" +version = "0.2.0" +description = "Jinja2 plugins for nornir" +category = "main" +optional = false +python-versions = ">=3.6,<4.0" +files = [ + {file = "nornir_jinja2-0.2.0-py3-none-any.whl", hash = 
"sha256:0c446bec7a8492923d4eb9ca00fb327603b41bc35d5f0112843c048737b506b1"}, + {file = "nornir_jinja2-0.2.0.tar.gz", hash = "sha256:9ee5e725fe5543dcba4ec8b976804e9e88ecd356ea3b62bad97578cea0de1f75"}, +] + +[package.dependencies] +jinja2 = ">=2.11.2,<4" +nornir = ">=3,<4" + +[[package]] +name = "nornir-napalm" +version = "0.4.0" +description = "NAPALM's plugins for nornir" +category = "main" +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "nornir_napalm-0.4.0-py3-none-any.whl", hash = "sha256:20a41499aecf9c4e41181b18a73b2ee3ab7763824645ac0eb80abb3973a5f17e"}, + {file = "nornir_napalm-0.4.0.tar.gz", hash = "sha256:84e0711ccbdf24bdb228042ab530bf688d6b2b8f12c65fa3cb73499c6974a9de"}, +] + +[package.dependencies] +napalm = ">=4,<5" +nornir = ">=3,<4" + +[[package]] +name = "nornir-nautobot" +version = "3.1.0" +description = "Nornir Nautobot" +category = "main" +optional = false +python-versions = ">=3.8,<4.0" +files = [ + {file = "nornir_nautobot-3.1.0-py3-none-any.whl", hash = "sha256:23197181c17fa6de503679490d04fdc7315133ec5ddc9b549eb0794af9da418f"}, + {file = "nornir_nautobot-3.1.0.tar.gz", hash = "sha256:5bc58d83650fb87aec456358205d455aaa5289345e2bc18f32d6bfa421eec63c"}, +] + +[package.dependencies] +httpx = ">=0.24.1,<0.25.0" +netutils = ">=1.6.0,<2.0.0" +nornir = ">=3.0.0,<4.0.0" +nornir-jinja2 = ">=0.2.0,<0.3.0" +nornir-napalm = ">=0.4.0,<1.0.0" +nornir-netmiko = ">=1,<2" +nornir-utils = ">=0,<1" +pynautobot = ">=2.0.0rc2" +requests = ">=2.25.1,<3.0.0" + +[package.extras] +mikrotik-driver = ["routeros-api (>=0.17.0,<0.18.0)"] + +[[package]] +name = "nornir-netmiko" +version = "1.0.1" +description = "Netmiko's plugins for Nornir" +category = "main" +optional = false +python-versions = ">=3.8,<4.0" +files = [ + {file = "nornir_netmiko-1.0.1-py3-none-any.whl", hash = "sha256:eaee2944ad386b40c0719e8ac393ac63d531f44fb9a07d660bae7de430f12834"}, + {file = "nornir_netmiko-1.0.1.tar.gz", hash = 
"sha256:498546df001e0e499f10c5646d1356e361ccbb165b1335b89cfe8f19765e24d7"}, +] + +[package.dependencies] +netmiko = ">=4.0.0,<5.0.0" + +[[package]] +name = "nornir-utils" +version = "0.2.0" +description = "Collection of plugins and functions for nornir that don't require external dependencies" +category = "main" +optional = false +python-versions = ">=3.6.2,<4.0.0" +files = [ + {file = "nornir_utils-0.2.0-py3-none-any.whl", hash = "sha256:b4c430793a74f03affd5ff2d90abc8c67a28c7ff325f48e3a01a9a44ec71b844"}, + {file = "nornir_utils-0.2.0.tar.gz", hash = "sha256:4de6aaa35e5c1a98e1c84db84a008b0b1e974dc65d88484f2dcea3e30c95fbc2"}, +] + +[package.dependencies] +colorama = ">=0.4.3,<0.5.0" +nornir = ">=3,<4" + [[package]] name = "ntc-templates" -version = "4.0.1" +version = "4.1.0" description = "TextFSM Templates for Network Devices, and Python wrapper for TextFSM's CliTable." category = "main" optional = false -python-versions = ">=3.7,<4.0" +python-versions = ">=3.8,<4.0" files = [ - {file = "ntc_templates-4.0.1-py3-none-any.whl", hash = "sha256:4d20943fdffc70595fb2b983c6fcab926635c3e4621aaec13a9063a9a61241dd"}, - {file = "ntc_templates-4.0.1.tar.gz", hash = "sha256:5bd158592ac99e769a0b7e82e53fd714a410f912fc9e438e95cc0130cf7290a8"}, + {file = "ntc_templates-4.1.0-py3-none-any.whl", hash = "sha256:61acf390ac22ee87c82c3923ea7cda8b2918f6321973de3b7878beedc2818cb1"}, + {file = "ntc_templates-4.1.0.tar.gz", hash = "sha256:c4985893f347852e1ddbdf8205c098fb23d837185020b4f7f909a547695794df"}, ] [package.dependencies] @@ -2181,14 +2460,14 @@ files = [ [[package]] name = "paramiko" -version = "3.3.1" +version = "3.4.0" description = "SSH2 protocol library" category = "main" optional = false python-versions = ">=3.6" files = [ - {file = "paramiko-3.3.1-py3-none-any.whl", hash = "sha256:b7bc5340a43de4287bbe22fe6de728aa2c22468b2a849615498dd944c2f275eb"}, - {file = "paramiko-3.3.1.tar.gz", hash = "sha256:6a3777a961ac86dbef375c5f5b8d50014a1a96d0fd7f054a43bc880134b0ff77"}, + {file = 
"paramiko-3.4.0-py3-none-any.whl", hash = "sha256:43f0b51115a896f9c00f59618023484cb3a14b98bbceab43394a39c6739b7ee7"}, + {file = "paramiko-3.4.0.tar.gz", hash = "sha256:aac08f26a31dc4dffd92821527d1682d99d52f9ef6851968114a8728f3c274d3"}, ] [package.dependencies] @@ -2203,14 +2482,14 @@ invoke = ["invoke (>=2.0)"] [[package]] name = "pathspec" -version = "0.11.2" +version = "0.12.1" description = "Utility library for gitignore style pattern matching of file paths." category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pathspec-0.11.2-py3-none-any.whl", hash = "sha256:1d6ed233af05e679efb96b1851550ea95bbb64b7c490b0f5aa52996c11e92a20"}, - {file = "pathspec-0.11.2.tar.gz", hash = "sha256:e0d8d0ac2f12da61956eb2306b69f9469b42f4deb0f3cb6ed47b9cce9996ced3"}, + {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, + {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, ] [[package]] @@ -2307,14 +2586,14 @@ files = [ [[package]] name = "platformdirs" -version = "4.0.0" +version = "4.1.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "platformdirs-4.0.0-py3-none-any.whl", hash = "sha256:118c954d7e949b35437270383a3f2531e99dd93cf7ce4dc8340d3356d30f173b"}, - {file = "platformdirs-4.0.0.tar.gz", hash = "sha256:cb633b2bcf10c51af60beb0ab06d2f1d69064b43abf4c185ca6b28865f3f9731"}, + {file = "platformdirs-4.1.0-py3-none-any.whl", hash = "sha256:11c8f37bcca40db96d8144522d925583bdb7a31f7b0e37e3ed4318400a8e2380"}, + {file = "platformdirs-4.1.0.tar.gz", hash = "sha256:906d548203468492d432bcb294d4bc2fff751bf84971fbb2c10918cc206ee420"}, ] [package.extras] @@ -2355,14 +2634,14 @@ test = ["coveralls", "futures", "mock", "pytest (>=2.7.3)", "pytest-benchmark", [[package]] name = "prompt-toolkit" -version = "3.0.41" +version = "3.0.43" description = "Library for building powerful interactive command lines in Python" category = "main" optional = false python-versions = ">=3.7.0" files = [ - {file = "prompt_toolkit-3.0.41-py3-none-any.whl", hash = "sha256:f36fe301fafb7470e86aaf90f036eef600a3210be4decf461a5b1ca8403d3cb2"}, - {file = "prompt_toolkit-3.0.41.tar.gz", hash = "sha256:941367d97fc815548822aa26c2a269fdc4eb21e9ec05fc5d447cf09bad5d75f0"}, + {file = "prompt_toolkit-3.0.43-py3-none-any.whl", hash = "sha256:a11a29cb3bf0a28a387fe5122cdb649816a957cd9261dcedf8c9f1fef33eacf6"}, + {file = "prompt_toolkit-3.0.43.tar.gz", hash = "sha256:3527b7af26106cbc65a040bcc84839a3566ec1b051bb0bfe953631e704b0ff7d"}, ] [package.dependencies] @@ -2402,6 +2681,7 @@ files = [ {file = "psycopg2_binary-2.9.9-cp311-cp311-win32.whl", hash = "sha256:dc4926288b2a3e9fd7b50dc6a1909a13bbdadfc67d93f3374d984e56f885579d"}, {file = "psycopg2_binary-2.9.9-cp311-cp311-win_amd64.whl", hash = "sha256:b76bedd166805480ab069612119ea636f5ab8f8771e640ae103e05a4aae3e417"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:8532fd6e6e2dc57bcb3bc90b079c60de896d2128c5d9d6f24a63875a95a088cf"}, + {file = 
"psycopg2_binary-2.9.9-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b0605eaed3eb239e87df0d5e3c6489daae3f7388d455d0c0b4df899519c6a38d"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f8544b092a29a6ddd72f3556a9fcf249ec412e10ad28be6a0c0d948924f2212"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2d423c8d8a3c82d08fe8af900ad5b613ce3632a1249fd6a223941d0735fce493"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e5afae772c00980525f6d6ecf7cbca55676296b580c0e6abb407f15f3706996"}, @@ -2410,6 +2690,8 @@ files = [ {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:cb16c65dcb648d0a43a2521f2f0a2300f40639f6f8c1ecbc662141e4e3e1ee07"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:911dda9c487075abd54e644ccdf5e5c16773470a6a5d3826fda76699410066fb"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:57fede879f08d23c85140a360c6a77709113efd1c993923c59fde17aa27599fe"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-win32.whl", hash = "sha256:64cf30263844fa208851ebb13b0732ce674d8ec6a0c86a4e160495d299ba3c93"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-win_amd64.whl", hash = "sha256:81ff62668af011f9a48787564ab7eded4e9fb17a4a6a74af5ffa6a457400d2ab"}, {file = "psycopg2_binary-2.9.9-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2293b001e319ab0d869d660a704942c9e2cce19745262a8aba2115ef41a0a42a"}, {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03ef7df18daf2c4c07e2695e8cfd5ee7f748a1d54d802330985a78d2a5a6dca9"}, {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a602ea5aff39bb9fac6308e9c9d82b9a35c2bf288e184a816002c9fae930b77"}, @@ -2471,6 +2753,59 @@ files = [ {file = 
"pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, ] +[[package]] +name = "pydantic" +version = "1.10.14" +description = "Data validation and settings management using python type hints" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pydantic-1.10.14-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7f4fcec873f90537c382840f330b90f4715eebc2bc9925f04cb92de593eae054"}, + {file = "pydantic-1.10.14-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e3a76f571970fcd3c43ad982daf936ae39b3e90b8a2e96c04113a369869dc87"}, + {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82d886bd3c3fbeaa963692ef6b643159ccb4b4cefaf7ff1617720cbead04fd1d"}, + {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:798a3d05ee3b71967844a1164fd5bdb8c22c6d674f26274e78b9f29d81770c4e"}, + {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:23d47a4b57a38e8652bcab15a658fdb13c785b9ce217cc3a729504ab4e1d6bc9"}, + {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f9f674b5c3bebc2eba401de64f29948ae1e646ba2735f884d1594c5f675d6f2a"}, + {file = "pydantic-1.10.14-cp310-cp310-win_amd64.whl", hash = "sha256:24a7679fab2e0eeedb5a8924fc4a694b3bcaac7d305aeeac72dd7d4e05ecbebf"}, + {file = "pydantic-1.10.14-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9d578ac4bf7fdf10ce14caba6f734c178379bd35c486c6deb6f49006e1ba78a7"}, + {file = "pydantic-1.10.14-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fa7790e94c60f809c95602a26d906eba01a0abee9cc24150e4ce2189352deb1b"}, + {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aad4e10efa5474ed1a611b6d7f0d130f4aafadceb73c11d9e72823e8f508e663"}, + {file = 
"pydantic-1.10.14-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1245f4f61f467cb3dfeced2b119afef3db386aec3d24a22a1de08c65038b255f"}, + {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:21efacc678a11114c765eb52ec0db62edffa89e9a562a94cbf8fa10b5db5c046"}, + {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:412ab4a3f6dbd2bf18aefa9f79c7cca23744846b31f1d6555c2ee2b05a2e14ca"}, + {file = "pydantic-1.10.14-cp311-cp311-win_amd64.whl", hash = "sha256:e897c9f35281f7889873a3e6d6b69aa1447ceb024e8495a5f0d02ecd17742a7f"}, + {file = "pydantic-1.10.14-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d604be0f0b44d473e54fdcb12302495fe0467c56509a2f80483476f3ba92b33c"}, + {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a42c7d17706911199798d4c464b352e640cab4351efe69c2267823d619a937e5"}, + {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:596f12a1085e38dbda5cbb874d0973303e34227b400b6414782bf205cc14940c"}, + {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bfb113860e9288d0886e3b9e49d9cf4a9d48b441f52ded7d96db7819028514cc"}, + {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bc3ed06ab13660b565eed80887fcfbc0070f0aa0691fbb351657041d3e874efe"}, + {file = "pydantic-1.10.14-cp37-cp37m-win_amd64.whl", hash = "sha256:ad8c2bc677ae5f6dbd3cf92f2c7dc613507eafe8f71719727cbc0a7dec9a8c01"}, + {file = "pydantic-1.10.14-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c37c28449752bb1f47975d22ef2882d70513c546f8f37201e0fec3a97b816eee"}, + {file = "pydantic-1.10.14-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:49a46a0994dd551ec051986806122767cf144b9702e31d47f6d493c336462597"}, + {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:53e3819bd20a42470d6dd0fe7fc1c121c92247bca104ce608e609b59bc7a77ee"}, + {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0fbb503bbbbab0c588ed3cd21975a1d0d4163b87e360fec17a792f7d8c4ff29f"}, + {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:336709883c15c050b9c55a63d6c7ff09be883dbc17805d2b063395dd9d9d0022"}, + {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4ae57b4d8e3312d486e2498d42aed3ece7b51848336964e43abbf9671584e67f"}, + {file = "pydantic-1.10.14-cp38-cp38-win_amd64.whl", hash = "sha256:dba49d52500c35cfec0b28aa8b3ea5c37c9df183ffc7210b10ff2a415c125c4a"}, + {file = "pydantic-1.10.14-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c66609e138c31cba607d8e2a7b6a5dc38979a06c900815495b2d90ce6ded35b4"}, + {file = "pydantic-1.10.14-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d986e115e0b39604b9eee3507987368ff8148222da213cd38c359f6f57b3b347"}, + {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:646b2b12df4295b4c3148850c85bff29ef6d0d9621a8d091e98094871a62e5c7"}, + {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282613a5969c47c83a8710cc8bfd1e70c9223feb76566f74683af889faadc0ea"}, + {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:466669501d08ad8eb3c4fecd991c5e793c4e0bbd62299d05111d4f827cded64f"}, + {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:13e86a19dca96373dcf3190fcb8797d40a6f12f154a244a8d1e8e03b8f280593"}, + {file = "pydantic-1.10.14-cp39-cp39-win_amd64.whl", hash = "sha256:08b6ec0917c30861e3fe71a93be1648a2aa4f62f866142ba21670b24444d7fd8"}, + {file = "pydantic-1.10.14-py3-none-any.whl", hash = "sha256:8ee853cd12ac2ddbf0ecbac1c289f95882b2d4482258048079d13be700aa114c"}, + {file = "pydantic-1.10.14.tar.gz", hash = 
"sha256:46f17b832fe27de7850896f3afee50ea682220dd218f7e9c88d436788419dca6"}, +] + +[package.dependencies] +typing-extensions = ">=4.2.0" + +[package.extras] +dotenv = ["python-dotenv (>=0.10.4)"] +email = ["email-validator (>=1.0.3)"] + [[package]] name = "pydocstyle" version = "6.3.0" @@ -2521,14 +2856,14 @@ files = [ [[package]] name = "pygments" -version = "2.17.1" +version = "2.17.2" description = "Pygments is a syntax highlighting package written in Python." category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "pygments-2.17.1-py3-none-any.whl", hash = "sha256:1b37f1b1e1bff2af52ecaf28cc601e2ef7077000b227a0675da25aef85784bc4"}, - {file = "pygments-2.17.1.tar.gz", hash = "sha256:e45a0e74bf9c530f564ca81b8952343be986a29f6afe7f5ad95c5f06b7bdf5e8"}, + {file = "pygments-2.17.2-py3-none-any.whl", hash = "sha256:b27c2826c47d0f3219f29554824c30c5e8945175d888647acd804ddd04af846c"}, + {file = "pygments-2.17.2.tar.gz", hash = "sha256:da46cec9fd2de5be3a8a784f434e4c4ab670b4ff54d605c4c2717e9d49c4c367"}, ] [package.extras] @@ -2681,6 +3016,22 @@ cffi = ">=1.4.1" docs = ["sphinx (>=1.6.5)", "sphinx-rtd-theme"] tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"] +[[package]] +name = "pynautobot" +version = "2.0.1" +description = "Nautobot API client library" +category = "main" +optional = false +python-versions = ">=3.8,<4.0" +files = [ + {file = "pynautobot-2.0.1-py3-none-any.whl", hash = "sha256:14f9f05ef4c9f8918a56e4892c3badd3c25679aaf5cc6292adcebd7e1ba419c7"}, + {file = "pynautobot-2.0.1.tar.gz", hash = "sha256:de8bf725570baa5bee3a47e2a0de01605ab97e852e5f534b3d8e54a4ed6e2043"}, +] + +[package.dependencies] +requests = ">=2.30.0,<3.0.0" +urllib3 = ">=1.21.1,<1.27" + [[package]] name = "pyparsing" version = "3.1.1" @@ -2796,55 +3147,55 @@ files = [ [[package]] name = "pyuwsgi" -version = "2.0.23" +version = "2.0.23.post0" description = "The uWSGI server" category = "main" optional = false python-versions = "*" files = [ - {file = 
"pyuwsgi-2.0.23-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0bb538ef57960389d67bcd4a9e7ebb562ed13a4556a5596305ce5361e121fc4e"}, - {file = "pyuwsgi-2.0.23-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:efd9689290c3b4afec7d28f1c43ec60f9ee905abf66a501584454cbf6b620678"}, - {file = "pyuwsgi-2.0.23-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:80e6fd3a9f49fad9404dd2622116db16990dd9c5061461fd700a82b429f0ee2b"}, - {file = "pyuwsgi-2.0.23-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4796cb1d35eff2cdae6ea01ffb26d2ec0ddf5c692d9f4bf5a28cab61baf78f4"}, - {file = "pyuwsgi-2.0.23-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:366dbc57eaee7b37f3e1c4039fcd7ba2a5693579e17ba07704038ffa28a8be57"}, - {file = "pyuwsgi-2.0.23-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:40ddfcb7d972cac169e62253027f932bb047a995cfbe98398c1451b137e3cf8d"}, - {file = "pyuwsgi-2.0.23-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f4dc785d94878088fd2b4b6da7a630b5538d461b92b6a767cb56401dac1373b9"}, - {file = "pyuwsgi-2.0.23-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cbde1da759d1486d6b20938b8f03b84b4dfe4a1b7ba111c586b1eaed6cd85cdc"}, - {file = "pyuwsgi-2.0.23-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:12568dbacacd02b22791b352c3e93a9307d565512a851b36483ffe4db69b711e"}, - {file = "pyuwsgi-2.0.23-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c9db7b77bf6ee429da0583f36f168bcf1294195d7a4ac53b53d1f5d8ac8c2717"}, - {file = "pyuwsgi-2.0.23-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1de2f99dc4642aea7226889c76083884260920adc14a4a533660479941c6e6f2"}, - {file = "pyuwsgi-2.0.23-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fbad05b405630ddaaf8010822fc8bc553551bcf691df2d1ffbfd4d2204f9973f"}, - {file = "pyuwsgi-2.0.23-cp311-cp311-musllinux_1_1_i686.whl", hash = 
"sha256:39107b8abaf488e890d53372bef7b80fdf350b703bbfa2f4ded1002eea31b198"}, - {file = "pyuwsgi-2.0.23-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:feb783ef451dc09cd37b2376ccc9e8ff28d3296542df0351e0a4502c8fac765c"}, - {file = "pyuwsgi-2.0.23-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ea11e270161e5cc8f6935778841f30e3226b0ee3b70185d88d8fa2bf0317bdc9"}, - {file = "pyuwsgi-2.0.23-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3608203a37ebf5580f3fc4901ae1295fd181caa7ec49d29b7dcc1864725049e"}, - {file = "pyuwsgi-2.0.23-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bf9d22dd5397a80cf91242f173c4bab0104c7c8b17d286b289a9582a30643cac"}, - {file = "pyuwsgi-2.0.23-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6505cb52b25eecf81338b9f17f4b47ec6288f3911eb65a5a9f3be03ed2ba0b97"}, - {file = "pyuwsgi-2.0.23-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:563270210d79a9e1a76ead34dec40b0ddf1491ac44e02e9d9fd41f8e08938f07"}, - {file = "pyuwsgi-2.0.23-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:1883c08aa902dbeb7bd70c5ea319452ecbce49adc715ece4c4bef8c0acfb8523"}, - {file = "pyuwsgi-2.0.23-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:79641c8fccc507288b58805c0edb0540713b9fb65d445d703329606a3fbc2fab"}, - {file = "pyuwsgi-2.0.23-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:02a21ce1175599d0e9d63dc3bb576f7662e1ba3412b746bd9780708f55b35587"}, - {file = "pyuwsgi-2.0.23-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d72e622517522df0e8e04fc1f2aff0d1cafeececc44eecf6f83646f405ef474f"}, - {file = "pyuwsgi-2.0.23-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:58cb2c48bfb34b73f5a7586c55d2e29e927a7ca6ca45153e9d860d380f4d6ef1"}, - {file = "pyuwsgi-2.0.23-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7684b4c97bb0d52f3e53f5f67a39241ed1ea234e4a8c351a7ea4a4cfd397909"}, - {file = 
"pyuwsgi-2.0.23-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:4983d2f201d14bf7ed6ec2f6e9449e046440476877e55b1cf6f165d2eb6d3cf4"}, - {file = "pyuwsgi-2.0.23-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:462dccd00ad01a33744a7c061fa2080b58e6b4c0f25cb95e8f9628a42d10f04f"}, - {file = "pyuwsgi-2.0.23-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:764e833b890a82cf94f60087147bd98d8d8769e133e1c1289cd7b8af4d4e19ee"}, - {file = "pyuwsgi-2.0.23-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:120ae908df0b006d1e88b43a3dfbb2f02212ac768d75baefc2a20cdf1b279b11"}, - {file = "pyuwsgi-2.0.23-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78d8ab2ac544a80bfb57a3019f1768e2ca327993f3a2e39aee92b0a70746f0bb"}, - {file = "pyuwsgi-2.0.23-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e1e3151b5495e3b1882b07a72e040e7a0422e8e5e58ceafc4cc046428c781f86"}, - {file = "pyuwsgi-2.0.23-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56ba238ccf4e12de0bba0ee7d92e316e3acda22419e3403dc0d474420baf3d71"}, - {file = "pyuwsgi-2.0.23-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:173709af71a86d9efee16a702933fee2ee3e6ac6b7f80eee86414bab0c80838a"}, - {file = "pyuwsgi-2.0.23-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a35ab28beba766f89c7a0db6a6a0fcedb72d7c9ff3262f3f27418bf5b757602e"}, - {file = "pyuwsgi-2.0.23-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0fd1f679c4597641bb30887e9180c42dfabf4b3e7e2747425f4468fe93a17e51"}, - {file = "pyuwsgi-2.0.23-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7c8eca007320f91f4009eca578e3014a443e7f7b33dabb2454754971fd5df4c0"}, - {file = "pyuwsgi-2.0.23-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cebebc9a322f3d5caf19938114d66ff341852756511f99f1892fbc684120501"}, - {file = "pyuwsgi-2.0.23-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c8f2311699e2562670e3ce979bbb566302e7951e758ee80f77a42f1e13a2e221"}, - {file 
= "pyuwsgi-2.0.23-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf687febfb7f1cfcbedb07762f39279df8725e9e681a859448ee1c1e79a39180"}, - {file = "pyuwsgi-2.0.23-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b27a7dd26e134c134ba0ed17bc28209eb459709480bdc773ce7da5ecc016c81a"}, - {file = "pyuwsgi-2.0.23-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:447a2a72e4285a1617154c496005fbaf1fbf5b3cf6e81186a13e3627ed7b0994"}, - {file = "pyuwsgi-2.0.23-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a13932ba8079b627d6233c8a04c1544bbe2a9007ddeed7f68f46401b1d0c5d5d"}, - {file = "pyuwsgi-2.0.23.tar.gz", hash = "sha256:74ac3e9c641969a3073c67793773a73bd7968ddcc3fa810c5396415e80cc0df1"}, + {file = "pyuwsgi-2.0.23.post0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:49dfe43726f4a71d3440f7a36eb3ba5b361e04807164d34ececda138e2dc2375"}, + {file = "pyuwsgi-2.0.23.post0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65420b185003dd5b66f41a6d1aa03d63d953a18e818bd4a013fc8e9d580f11cb"}, + {file = "pyuwsgi-2.0.23.post0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7bc7c60d8e1242b3a638754d2487c505112c642010c460442993be85f3ca9ec7"}, + {file = "pyuwsgi-2.0.23.post0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1ae2abaa47cb9c0018c790935897aec8001fb709dfac54286a37ab2e0b88dca"}, + {file = "pyuwsgi-2.0.23.post0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:af376cafca1501b2d4b8184c427c55b32c1a3dcb6070dc27115ca552898c7ff8"}, + {file = "pyuwsgi-2.0.23.post0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:f56a729808ed7aa1d7973d6f900a75bc36b976b7ab6c8867064f36e34cdafd4e"}, + {file = "pyuwsgi-2.0.23.post0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4270e68bb2633b0fc132aad6d415e4e0cde67093a97e64dd84bd186264a8c083"}, + {file = "pyuwsgi-2.0.23.post0-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:97c940a69242dc45658dba3330e64d809f34e33d9631547b6928fd20075b4bb9"}, + {file = "pyuwsgi-2.0.23.post0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8cac396c2e8e0d199bde9bb8fc90538c82207d0c3d722d08b9a63619b41945d6"}, + {file = "pyuwsgi-2.0.23.post0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:59d6a718ad42be54b2b80c8c236b728b8b83fb93438786e95f63fc259229ccd7"}, + {file = "pyuwsgi-2.0.23.post0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c38b5bb59e1bf59030f2d43a3e67aa18e6089c8e7f43e9c5f2099567466d35f4"}, + {file = "pyuwsgi-2.0.23.post0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7199009447770812056a5b417c4847bd44db1b0230d4bb64c48a4ffacd4e96f0"}, + {file = "pyuwsgi-2.0.23.post0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:f361d168cf175796fe36ab6a88dee079245a2f08e587e8190a38bd1b33238fa8"}, + {file = "pyuwsgi-2.0.23.post0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:52a45e98fe746ae9c9437c5b6f0cdb6117f979c8800f09c8e4dae2997786affd"}, + {file = "pyuwsgi-2.0.23.post0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f7455976abfa1dd43b5f3376f7f04a925c16babba1c3fc6edcdd81f5c0f24383"}, + {file = "pyuwsgi-2.0.23.post0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:508f5d84cd677cecc640d0e321badc61080c40c61843cd130b32f356729a599f"}, + {file = "pyuwsgi-2.0.23.post0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dcf93afec49f5cf29b0a68f4d2fb3e44a3ad1f205704ab2f41f9db47dacb8e13"}, + {file = "pyuwsgi-2.0.23.post0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a19ab0d5c43bc179a70cb079feb7804e39be6326bf98ec38808fcea5e7d44bd0"}, + {file = "pyuwsgi-2.0.23.post0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:8c5283e38c4fd3130cd7384d57535d60435c63b81a41a6463f26f340efeda9de"}, + {file = "pyuwsgi-2.0.23.post0-cp312-cp312-musllinux_1_1_i686.whl", hash = 
"sha256:0d9dfb79bffa552e5985385bc114ecec1d4079b95ce24796f577ef0df727da06"}, + {file = "pyuwsgi-2.0.23.post0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b531ac80155b6c839215d05f95569b34e614e97aab055072c74112b1d2a45546"}, + {file = "pyuwsgi-2.0.23.post0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:eae183104f3fa26f3d9c28fe75f2ad914e3a365103a6a66e329c0f59f9e461d4"}, + {file = "pyuwsgi-2.0.23.post0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a34ab2863ff0120c6e0e75c63c9ced462bfb4777e6b8237e4e1df60fb34af51"}, + {file = "pyuwsgi-2.0.23.post0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fc18481f336be63e80fc983aaa1a040e7c69c25c3145edcf93f0e6de2f1ad0d6"}, + {file = "pyuwsgi-2.0.23.post0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:245da016b424c261d148bbb83d2407aac77e6d5793cbd4e23a17f7e3a8aa061f"}, + {file = "pyuwsgi-2.0.23.post0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:8de1d975be958cff9122ecc82bf393bf7f41fff6f1047e76ed972047763bbd31"}, + {file = "pyuwsgi-2.0.23.post0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:d75859311605a510a6050ec622ec4beb9f2f8cce5f090e5cea70a1ff74133f8b"}, + {file = "pyuwsgi-2.0.23.post0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d3ad00212ffbb208b7146744ad3710b908734f844b5e2bf533fb09fc44726f37"}, + {file = "pyuwsgi-2.0.23.post0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:374142b106de187c4572b4441a367fa3466d9ea5aaabe475da42bb9f2202a690"}, + {file = "pyuwsgi-2.0.23.post0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:137db348bd5f585e8e5a609046d3ac9ef58483bba93de1e3c568c1a860c31b9c"}, + {file = "pyuwsgi-2.0.23.post0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:52b7a837dbc8702b245481514a32c88418a42df7b5ee68d45695eba457abd3ee"}, + {file = "pyuwsgi-2.0.23.post0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:cfcfeb1eaca5f4dd0e6ed9194e7ec98dcb3a8ac108e8f0414ed7c28d608517ef"}, + {file = "pyuwsgi-2.0.23.post0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7887c2acc8262223ff9cdce974851da0917818c12ef3ec0f49ec11a9943731fe"}, + {file = "pyuwsgi-2.0.23.post0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:bae72689ddf8e0bdd1a974a364ed052dd19d7897f1d5c3efcf8d9010c60f56ef"}, + {file = "pyuwsgi-2.0.23.post0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:9565569474f9e9f02f6fa490d96d8c5c7e3004829c01c0446cdb74c618b6a433"}, + {file = "pyuwsgi-2.0.23.post0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6ba86c6aa815635eefe7728b9b219af281a4e956bab240c5871db6c151c300a8"}, + {file = "pyuwsgi-2.0.23.post0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29ab8a02e812fbc34026ddb79f274a574c96fc488f384f320d3af37bd7edf932"}, + {file = "pyuwsgi-2.0.23.post0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f4f9c0694a11d8dfbbe2814b8b242a7c4dfa143b63e01447fabce9966a90fa60"}, + {file = "pyuwsgi-2.0.23.post0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f75e45e14462cbb94fc32242378eef7bda97173de57a68a5d46e4053677a7547"}, + {file = "pyuwsgi-2.0.23.post0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e7140fc3548cd9d0f02c4511b679ba47d26593d2cceb249d2d147c9901d90022"}, + {file = "pyuwsgi-2.0.23.post0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ed348cc4c5a4964c8e8fa61ab0ef50c00f7676179a6c0cb0f55f0122db1db1c2"}, + {file = "pyuwsgi-2.0.23.post0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:17a8818ec98f92e7935cf0ff56ed4f02a069362e10554df969f70fcdf78d9199"}, + {file = "pyuwsgi-2.0.23.post0.tar.gz", hash = "sha256:04ec79c4a3acad21002ebf1479050c3208605d27cc6659008df51092951eeb8e"}, ] [[package]] @@ -2860,6 +3211,7 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file 
= "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -2867,8 +3219,15 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = 
"PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -2885,6 +3244,7 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = 
"PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -2892,6 +3252,7 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -2933,14 +3294,14 @@ ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==20.0.1)", "requests (>=2.26.0)" [[package]] name = "referencing" -version = "0.31.0" +version = "0.32.1" description = "JSON Referencing + Python" category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "referencing-0.31.0-py3-none-any.whl", hash = "sha256:381b11e53dd93babb55696c71cf42aef2d36b8a150c49bf0bc301e36d536c882"}, - {file = "referencing-0.31.0.tar.gz", hash = "sha256:cc28f2c88fbe7b961a7817a0abc034c09a1e36358f82fedb4ffdf29a25398863"}, + {file = "referencing-0.32.1-py3-none-any.whl", hash = "sha256:7e4dc12271d8e15612bfe35792f5ea1c40970dadf8624602e33db2758f7ee554"}, + {file = 
"referencing-0.32.1.tar.gz", hash = "sha256:3c57da0513e9563eb7e203ebe9bb3a1b509b042016433bd1e45a2853466c3dd3"}, ] [package.dependencies] @@ -2949,100 +3310,105 @@ rpds-py = ">=0.7.0" [[package]] name = "regex" -version = "2023.10.3" +version = "2023.12.25" description = "Alternative regular expression module, to replace re." category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "regex-2023.10.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4c34d4f73ea738223a094d8e0ffd6d2c1a1b4c175da34d6b0de3d8d69bee6bcc"}, - {file = "regex-2023.10.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a8f4e49fc3ce020f65411432183e6775f24e02dff617281094ba6ab079ef0915"}, - {file = "regex-2023.10.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4cd1bccf99d3ef1ab6ba835308ad85be040e6a11b0977ef7ea8c8005f01a3c29"}, - {file = "regex-2023.10.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:81dce2ddc9f6e8f543d94b05d56e70d03a0774d32f6cca53e978dc01e4fc75b8"}, - {file = "regex-2023.10.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c6b4d23c04831e3ab61717a707a5d763b300213db49ca680edf8bf13ab5d91b"}, - {file = "regex-2023.10.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c15ad0aee158a15e17e0495e1e18741573d04eb6da06d8b84af726cfc1ed02ee"}, - {file = "regex-2023.10.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6239d4e2e0b52c8bd38c51b760cd870069f0bdf99700a62cd509d7a031749a55"}, - {file = "regex-2023.10.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4a8bf76e3182797c6b1afa5b822d1d5802ff30284abe4599e1247be4fd6b03be"}, - {file = "regex-2023.10.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d9c727bbcf0065cbb20f39d2b4f932f8fa1631c3e01fcedc979bd4f51fe051c5"}, - {file = 
"regex-2023.10.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:3ccf2716add72f80714b9a63899b67fa711b654be3fcdd34fa391d2d274ce767"}, - {file = "regex-2023.10.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:107ac60d1bfdc3edb53be75e2a52aff7481b92817cfdddd9b4519ccf0e54a6ff"}, - {file = "regex-2023.10.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:00ba3c9818e33f1fa974693fb55d24cdc8ebafcb2e4207680669d8f8d7cca79a"}, - {file = "regex-2023.10.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f0a47efb1dbef13af9c9a54a94a0b814902e547b7f21acb29434504d18f36e3a"}, - {file = "regex-2023.10.3-cp310-cp310-win32.whl", hash = "sha256:36362386b813fa6c9146da6149a001b7bd063dabc4d49522a1f7aa65b725c7ec"}, - {file = "regex-2023.10.3-cp310-cp310-win_amd64.whl", hash = "sha256:c65a3b5330b54103e7d21cac3f6bf3900d46f6d50138d73343d9e5b2900b2353"}, - {file = "regex-2023.10.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:90a79bce019c442604662d17bf69df99090e24cdc6ad95b18b6725c2988a490e"}, - {file = "regex-2023.10.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c7964c2183c3e6cce3f497e3a9f49d182e969f2dc3aeeadfa18945ff7bdd7051"}, - {file = "regex-2023.10.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ef80829117a8061f974b2fda8ec799717242353bff55f8a29411794d635d964"}, - {file = "regex-2023.10.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5addc9d0209a9afca5fc070f93b726bf7003bd63a427f65ef797a931782e7edc"}, - {file = "regex-2023.10.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c148bec483cc4b421562b4bcedb8e28a3b84fcc8f0aa4418e10898f3c2c0eb9b"}, - {file = "regex-2023.10.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d1f21af4c1539051049796a0f50aa342f9a27cde57318f2fc41ed50b0dbc4ac"}, - {file = "regex-2023.10.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:0b9ac09853b2a3e0d0082104036579809679e7715671cfbf89d83c1cb2a30f58"}, - {file = "regex-2023.10.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ebedc192abbc7fd13c5ee800e83a6df252bec691eb2c4bedc9f8b2e2903f5e2a"}, - {file = "regex-2023.10.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:d8a993c0a0ffd5f2d3bda23d0cd75e7086736f8f8268de8a82fbc4bd0ac6791e"}, - {file = "regex-2023.10.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:be6b7b8d42d3090b6c80793524fa66c57ad7ee3fe9722b258aec6d0672543fd0"}, - {file = "regex-2023.10.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4023e2efc35a30e66e938de5aef42b520c20e7eda7bb5fb12c35e5d09a4c43f6"}, - {file = "regex-2023.10.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0d47840dc05e0ba04fe2e26f15126de7c755496d5a8aae4a08bda4dd8d646c54"}, - {file = "regex-2023.10.3-cp311-cp311-win32.whl", hash = "sha256:9145f092b5d1977ec8c0ab46e7b3381b2fd069957b9862a43bd383e5c01d18c2"}, - {file = "regex-2023.10.3-cp311-cp311-win_amd64.whl", hash = "sha256:b6104f9a46bd8743e4f738afef69b153c4b8b592d35ae46db07fc28ae3d5fb7c"}, - {file = "regex-2023.10.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:bff507ae210371d4b1fe316d03433ac099f184d570a1a611e541923f78f05037"}, - {file = "regex-2023.10.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:be5e22bbb67924dea15039c3282fa4cc6cdfbe0cbbd1c0515f9223186fc2ec5f"}, - {file = "regex-2023.10.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a992f702c9be9c72fa46f01ca6e18d131906a7180950958f766c2aa294d4b41"}, - {file = "regex-2023.10.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7434a61b158be563c1362d9071358f8ab91b8d928728cd2882af060481244c9e"}, - {file = "regex-2023.10.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c2169b2dcabf4e608416f7f9468737583ce5f0a6e8677c4efbf795ce81109d7c"}, - {file = "regex-2023.10.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:a9e908ef5889cda4de038892b9accc36d33d72fb3e12c747e2799a0e806ec841"}, - {file = "regex-2023.10.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:12bd4bc2c632742c7ce20db48e0d99afdc05e03f0b4c1af90542e05b809a03d9"}, - {file = "regex-2023.10.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bc72c231f5449d86d6c7d9cc7cd819b6eb30134bb770b8cfdc0765e48ef9c420"}, - {file = "regex-2023.10.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bce8814b076f0ce5766dc87d5a056b0e9437b8e0cd351b9a6c4e1134a7dfbda9"}, - {file = "regex-2023.10.3-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:ba7cd6dc4d585ea544c1412019921570ebd8a597fabf475acc4528210d7c4a6f"}, - {file = "regex-2023.10.3-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b0c7d2f698e83f15228ba41c135501cfe7d5740181d5903e250e47f617eb4292"}, - {file = "regex-2023.10.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5a8f91c64f390ecee09ff793319f30a0f32492e99f5dc1c72bc361f23ccd0a9a"}, - {file = "regex-2023.10.3-cp312-cp312-win32.whl", hash = "sha256:ad08a69728ff3c79866d729b095872afe1e0557251da4abb2c5faff15a91d19a"}, - {file = "regex-2023.10.3-cp312-cp312-win_amd64.whl", hash = "sha256:39cdf8d141d6d44e8d5a12a8569d5a227f645c87df4f92179bd06e2e2705e76b"}, - {file = "regex-2023.10.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4a3ee019a9befe84fa3e917a2dd378807e423d013377a884c1970a3c2792d293"}, - {file = "regex-2023.10.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76066d7ff61ba6bf3cb5efe2428fc82aac91802844c022d849a1f0f53820502d"}, - {file = "regex-2023.10.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bfe50b61bab1b1ec260fa7cd91106fa9fece57e6beba05630afe27c71259c59b"}, - {file = "regex-2023.10.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fd88f373cb71e6b59b7fa597e47e518282455c2734fd4306a05ca219a1991b0"}, - {file = 
"regex-2023.10.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3ab05a182c7937fb374f7e946f04fb23a0c0699c0450e9fb02ef567412d2fa3"}, - {file = "regex-2023.10.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dac37cf08fcf2094159922edc7a2784cfcc5c70f8354469f79ed085f0328ebdf"}, - {file = "regex-2023.10.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e54ddd0bb8fb626aa1f9ba7b36629564544954fff9669b15da3610c22b9a0991"}, - {file = "regex-2023.10.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:3367007ad1951fde612bf65b0dffc8fd681a4ab98ac86957d16491400d661302"}, - {file = "regex-2023.10.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:16f8740eb6dbacc7113e3097b0a36065a02e37b47c936b551805d40340fb9971"}, - {file = "regex-2023.10.3-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:f4f2ca6df64cbdd27f27b34f35adb640b5d2d77264228554e68deda54456eb11"}, - {file = "regex-2023.10.3-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:39807cbcbe406efca2a233884e169d056c35aa7e9f343d4e78665246a332f597"}, - {file = "regex-2023.10.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:7eece6fbd3eae4a92d7c748ae825cbc1ee41a89bb1c3db05b5578ed3cfcfd7cb"}, - {file = "regex-2023.10.3-cp37-cp37m-win32.whl", hash = "sha256:ce615c92d90df8373d9e13acddd154152645c0dc060871abf6bd43809673d20a"}, - {file = "regex-2023.10.3-cp37-cp37m-win_amd64.whl", hash = "sha256:0f649fa32fe734c4abdfd4edbb8381c74abf5f34bc0b3271ce687b23729299ed"}, - {file = "regex-2023.10.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9b98b7681a9437262947f41c7fac567c7e1f6eddd94b0483596d320092004533"}, - {file = "regex-2023.10.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:91dc1d531f80c862441d7b66c4505cd6ea9d312f01fb2f4654f40c6fdf5cc37a"}, - {file = "regex-2023.10.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:82fcc1f1cc3ff1ab8a57ba619b149b907072e750815c5ba63e7aa2e1163384a4"}, - {file = "regex-2023.10.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7979b834ec7a33aafae34a90aad9f914c41fd6eaa8474e66953f3f6f7cbd4368"}, - {file = "regex-2023.10.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ef71561f82a89af6cfcbee47f0fabfdb6e63788a9258e913955d89fdd96902ab"}, - {file = "regex-2023.10.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd829712de97753367153ed84f2de752b86cd1f7a88b55a3a775eb52eafe8a94"}, - {file = "regex-2023.10.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:00e871d83a45eee2f8688d7e6849609c2ca2a04a6d48fba3dff4deef35d14f07"}, - {file = "regex-2023.10.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:706e7b739fdd17cb89e1fbf712d9dc21311fc2333f6d435eac2d4ee81985098c"}, - {file = "regex-2023.10.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:cc3f1c053b73f20c7ad88b0d1d23be7e7b3901229ce89f5000a8399746a6e039"}, - {file = "regex-2023.10.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6f85739e80d13644b981a88f529d79c5bdf646b460ba190bffcaf6d57b2a9863"}, - {file = "regex-2023.10.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:741ba2f511cc9626b7561a440f87d658aabb3d6b744a86a3c025f866b4d19e7f"}, - {file = "regex-2023.10.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:e77c90ab5997e85901da85131fd36acd0ed2221368199b65f0d11bca44549711"}, - {file = "regex-2023.10.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:979c24cbefaf2420c4e377ecd1f165ea08cc3d1fbb44bdc51bccbbf7c66a2cb4"}, - {file = "regex-2023.10.3-cp38-cp38-win32.whl", hash = "sha256:58837f9d221744d4c92d2cf7201c6acd19623b50c643b56992cbd2b745485d3d"}, - {file = "regex-2023.10.3-cp38-cp38-win_amd64.whl", hash = "sha256:c55853684fe08d4897c37dfc5faeff70607a5f1806c8be148f1695be4a63414b"}, - 
{file = "regex-2023.10.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2c54e23836650bdf2c18222c87f6f840d4943944146ca479858404fedeb9f9af"}, - {file = "regex-2023.10.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:69c0771ca5653c7d4b65203cbfc5e66db9375f1078689459fe196fe08b7b4930"}, - {file = "regex-2023.10.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ac965a998e1388e6ff2e9781f499ad1eaa41e962a40d11c7823c9952c77123e"}, - {file = "regex-2023.10.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1c0e8fae5b27caa34177bdfa5a960c46ff2f78ee2d45c6db15ae3f64ecadde14"}, - {file = "regex-2023.10.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6c56c3d47da04f921b73ff9415fbaa939f684d47293f071aa9cbb13c94afc17d"}, - {file = "regex-2023.10.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ef1e014eed78ab650bef9a6a9cbe50b052c0aebe553fb2881e0453717573f52"}, - {file = "regex-2023.10.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d29338556a59423d9ff7b6eb0cb89ead2b0875e08fe522f3e068b955c3e7b59b"}, - {file = "regex-2023.10.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9c6d0ced3c06d0f183b73d3c5920727268d2201aa0fe6d55c60d68c792ff3588"}, - {file = "regex-2023.10.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:994645a46c6a740ee8ce8df7911d4aee458d9b1bc5639bc968226763d07f00fa"}, - {file = "regex-2023.10.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:66e2fe786ef28da2b28e222c89502b2af984858091675044d93cb50e6f46d7af"}, - {file = "regex-2023.10.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:11175910f62b2b8c055f2b089e0fedd694fe2be3941b3e2633653bc51064c528"}, - {file = "regex-2023.10.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:06e9abc0e4c9ab4779c74ad99c3fc10d3967d03114449acc2c2762ad4472b8ca"}, - {file = 
"regex-2023.10.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:fb02e4257376ae25c6dd95a5aec377f9b18c09be6ebdefa7ad209b9137b73d48"}, - {file = "regex-2023.10.3-cp39-cp39-win32.whl", hash = "sha256:3b2c3502603fab52d7619b882c25a6850b766ebd1b18de3df23b2f939360e1bd"}, - {file = "regex-2023.10.3-cp39-cp39-win_amd64.whl", hash = "sha256:adbccd17dcaff65704c856bd29951c58a1bd4b2b0f8ad6b826dbd543fe740988"}, - {file = "regex-2023.10.3.tar.gz", hash = "sha256:3fef4f844d2290ee0ba57addcec17eec9e3df73f10a2748485dfd6a3a188cc0f"}, + {file = "regex-2023.12.25-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0694219a1d54336fd0445ea382d49d36882415c0134ee1e8332afd1529f0baa5"}, + {file = "regex-2023.12.25-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b014333bd0217ad3d54c143de9d4b9a3ca1c5a29a6d0d554952ea071cff0f1f8"}, + {file = "regex-2023.12.25-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d865984b3f71f6d0af64d0d88f5733521698f6c16f445bb09ce746c92c97c586"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e0eabac536b4cc7f57a5f3d095bfa557860ab912f25965e08fe1545e2ed8b4c"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c25a8ad70e716f96e13a637802813f65d8a6760ef48672aa3502f4c24ea8b400"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9b6d73353f777630626f403b0652055ebfe8ff142a44ec2cf18ae470395766e"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9cc99d6946d750eb75827cb53c4371b8b0fe89c733a94b1573c9dd16ea6c9e4"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88d1f7bef20c721359d8675f7d9f8e414ec5003d8f642fdfd8087777ff7f94b5"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = 
"sha256:cb3fe77aec8f1995611f966d0c656fdce398317f850d0e6e7aebdfe61f40e1cd"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7aa47c2e9ea33a4a2a05f40fcd3ea36d73853a2aae7b4feab6fc85f8bf2c9704"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:df26481f0c7a3f8739fecb3e81bc9da3fcfae34d6c094563b9d4670b047312e1"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c40281f7d70baf6e0db0c2f7472b31609f5bc2748fe7275ea65a0b4601d9b392"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:d94a1db462d5690ebf6ae86d11c5e420042b9898af5dcf278bd97d6bda065423"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ba1b30765a55acf15dce3f364e4928b80858fa8f979ad41f862358939bdd1f2f"}, + {file = "regex-2023.12.25-cp310-cp310-win32.whl", hash = "sha256:150c39f5b964e4d7dba46a7962a088fbc91f06e606f023ce57bb347a3b2d4630"}, + {file = "regex-2023.12.25-cp310-cp310-win_amd64.whl", hash = "sha256:09da66917262d9481c719599116c7dc0c321ffcec4b1f510c4f8a066f8768105"}, + {file = "regex-2023.12.25-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:1b9d811f72210fa9306aeb88385b8f8bcef0dfbf3873410413c00aa94c56c2b6"}, + {file = "regex-2023.12.25-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d902a43085a308cef32c0d3aea962524b725403fd9373dea18110904003bac97"}, + {file = "regex-2023.12.25-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d166eafc19f4718df38887b2bbe1467a4f74a9830e8605089ea7a30dd4da8887"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7ad32824b7f02bb3c9f80306d405a1d9b7bb89362d68b3c5a9be53836caebdb"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:636ba0a77de609d6510235b7f0e77ec494d2657108f777e8765efc060094c98c"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:0fda75704357805eb953a3ee15a2b240694a9a514548cd49b3c5124b4e2ad01b"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f72cbae7f6b01591f90814250e636065850c5926751af02bb48da94dfced7baa"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db2a0b1857f18b11e3b0e54ddfefc96af46b0896fb678c85f63fb8c37518b3e7"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7502534e55c7c36c0978c91ba6f61703faf7ce733715ca48f499d3dbbd7657e0"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e8c7e08bb566de4faaf11984af13f6bcf6a08f327b13631d41d62592681d24fe"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:283fc8eed679758de38fe493b7d7d84a198b558942b03f017b1f94dda8efae80"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:f44dd4d68697559d007462b0a3a1d9acd61d97072b71f6d1968daef26bc744bd"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:67d3ccfc590e5e7197750fcb3a2915b416a53e2de847a728cfa60141054123d4"}, + {file = "regex-2023.12.25-cp311-cp311-win32.whl", hash = "sha256:68191f80a9bad283432385961d9efe09d783bcd36ed35a60fb1ff3f1ec2efe87"}, + {file = "regex-2023.12.25-cp311-cp311-win_amd64.whl", hash = "sha256:7d2af3f6b8419661a0c421584cfe8aaec1c0e435ce7e47ee2a97e344b98f794f"}, + {file = "regex-2023.12.25-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8a0ccf52bb37d1a700375a6b395bff5dd15c50acb745f7db30415bae3c2b0715"}, + {file = "regex-2023.12.25-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c3c4a78615b7762740531c27cf46e2f388d8d727d0c0c739e72048beb26c8a9d"}, + {file = "regex-2023.12.25-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ad83e7545b4ab69216cef4cc47e344d19622e28aabec61574b20257c65466d6a"}, + {file = 
"regex-2023.12.25-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7a635871143661feccce3979e1727c4e094f2bdfd3ec4b90dfd4f16f571a87a"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d498eea3f581fbe1b34b59c697512a8baef88212f92e4c7830fcc1499f5b45a5"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:43f7cd5754d02a56ae4ebb91b33461dc67be8e3e0153f593c509e21d219c5060"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51f4b32f793812714fd5307222a7f77e739b9bc566dc94a18126aba3b92b98a3"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba99d8077424501b9616b43a2d208095746fb1284fc5ba490139651f971d39d9"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4bfc2b16e3ba8850e0e262467275dd4d62f0d045e0e9eda2bc65078c0110a11f"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8c2c19dae8a3eb0ea45a8448356ed561be843b13cbc34b840922ddf565498c1c"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:60080bb3d8617d96f0fb7e19796384cc2467447ef1c491694850ebd3670bc457"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b77e27b79448e34c2c51c09836033056a0547aa360c45eeeb67803da7b0eedaf"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:518440c991f514331f4850a63560321f833979d145d7d81186dbe2f19e27ae3d"}, + {file = "regex-2023.12.25-cp312-cp312-win32.whl", hash = "sha256:e2610e9406d3b0073636a3a2e80db05a02f0c3169b5632022b4e81c0364bcda5"}, + {file = "regex-2023.12.25-cp312-cp312-win_amd64.whl", hash = "sha256:cc37b9aeebab425f11f27e5e9e6cf580be7206c6582a64467a14dda211abc232"}, + {file = "regex-2023.12.25-cp37-cp37m-macosx_10_9_x86_64.whl", hash = 
"sha256:da695d75ac97cb1cd725adac136d25ca687da4536154cdc2815f576e4da11c69"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d126361607b33c4eb7b36debc173bf25d7805847346dd4d99b5499e1fef52bc7"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4719bb05094d7d8563a450cf8738d2e1061420f79cfcc1fa7f0a44744c4d8f73"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5dd58946bce44b53b06d94aa95560d0b243eb2fe64227cba50017a8d8b3cd3e2"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22a86d9fff2009302c440b9d799ef2fe322416d2d58fc124b926aa89365ec482"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2aae8101919e8aa05ecfe6322b278f41ce2994c4a430303c4cd163fef746e04f"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e692296c4cc2873967771345a876bcfc1c547e8dd695c6b89342488b0ea55cd8"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:263ef5cc10979837f243950637fffb06e8daed7f1ac1e39d5910fd29929e489a"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:d6f7e255e5fa94642a0724e35406e6cb7001c09d476ab5fce002f652b36d0c39"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:88ad44e220e22b63b0f8f81f007e8abbb92874d8ced66f32571ef8beb0643b2b"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:3a17d3ede18f9cedcbe23d2daa8a2cd6f59fe2bf082c567e43083bba3fb00347"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d15b274f9e15b1a0b7a45d2ac86d1f634d983ca40d6b886721626c47a400bf39"}, + {file = "regex-2023.12.25-cp37-cp37m-win32.whl", hash = 
"sha256:ed19b3a05ae0c97dd8f75a5d8f21f7723a8c33bbc555da6bbe1f96c470139d3c"}, + {file = "regex-2023.12.25-cp37-cp37m-win_amd64.whl", hash = "sha256:a6d1047952c0b8104a1d371f88f4ab62e6275567d4458c1e26e9627ad489b445"}, + {file = "regex-2023.12.25-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b43523d7bc2abd757119dbfb38af91b5735eea45537ec6ec3a5ec3f9562a1c53"}, + {file = "regex-2023.12.25-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:efb2d82f33b2212898f1659fb1c2e9ac30493ac41e4d53123da374c3b5541e64"}, + {file = "regex-2023.12.25-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b7fca9205b59c1a3d5031f7e64ed627a1074730a51c2a80e97653e3e9fa0d415"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086dd15e9435b393ae06f96ab69ab2d333f5d65cbe65ca5a3ef0ec9564dfe770"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e81469f7d01efed9b53740aedd26085f20d49da65f9c1f41e822a33992cb1590"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:34e4af5b27232f68042aa40a91c3b9bb4da0eeb31b7632e0091afc4310afe6cb"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9852b76ab558e45b20bf1893b59af64a28bd3820b0c2efc80e0a70a4a3ea51c1"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff100b203092af77d1a5a7abe085b3506b7eaaf9abf65b73b7d6905b6cb76988"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cc038b2d8b1470364b1888a98fd22d616fba2b6309c5b5f181ad4483e0017861"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:094ba386bb5c01e54e14434d4caabf6583334090865b23ef58e0424a6286d3dc"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_i686.whl", hash = 
"sha256:5cd05d0f57846d8ba4b71d9c00f6f37d6b97d5e5ef8b3c3840426a475c8f70f4"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:9aa1a67bbf0f957bbe096375887b2505f5d8ae16bf04488e8b0f334c36e31360"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:98a2636994f943b871786c9e82bfe7883ecdaba2ef5df54e1450fa9869d1f756"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:37f8e93a81fc5e5bd8db7e10e62dc64261bcd88f8d7e6640aaebe9bc180d9ce2"}, + {file = "regex-2023.12.25-cp38-cp38-win32.whl", hash = "sha256:d78bd484930c1da2b9679290a41cdb25cc127d783768a0369d6b449e72f88beb"}, + {file = "regex-2023.12.25-cp38-cp38-win_amd64.whl", hash = "sha256:b521dcecebc5b978b447f0f69b5b7f3840eac454862270406a39837ffae4e697"}, + {file = "regex-2023.12.25-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f7bc09bc9c29ebead055bcba136a67378f03d66bf359e87d0f7c759d6d4ffa31"}, + {file = "regex-2023.12.25-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e14b73607d6231f3cc4622809c196b540a6a44e903bcfad940779c80dffa7be7"}, + {file = "regex-2023.12.25-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9eda5f7a50141291beda3edd00abc2d4a5b16c29c92daf8d5bd76934150f3edc"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc6bb9aa69aacf0f6032c307da718f61a40cf970849e471254e0e91c56ffca95"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:298dc6354d414bc921581be85695d18912bea163a8b23cac9a2562bbcd5088b1"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2f4e475a80ecbd15896a976aa0b386c5525d0ed34d5c600b6d3ebac0a67c7ddf"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:531ac6cf22b53e0696f8e1d56ce2396311254eb806111ddd3922c9d937151dae"}, + {file = 
"regex-2023.12.25-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22f3470f7524b6da61e2020672df2f3063676aff444db1daa283c2ea4ed259d6"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:89723d2112697feaa320c9d351e5f5e7b841e83f8b143dba8e2d2b5f04e10923"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0ecf44ddf9171cd7566ef1768047f6e66975788258b1c6c6ca78098b95cf9a3d"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:905466ad1702ed4acfd67a902af50b8db1feeb9781436372261808df7a2a7bca"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:4558410b7a5607a645e9804a3e9dd509af12fb72b9825b13791a37cd417d73a5"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:7e316026cc1095f2a3e8cc012822c99f413b702eaa2ca5408a513609488cb62f"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3b1de218d5375cd6ac4b5493e0b9f3df2be331e86520f23382f216c137913d20"}, + {file = "regex-2023.12.25-cp39-cp39-win32.whl", hash = "sha256:11a963f8e25ab5c61348d090bf1b07f1953929c13bd2309a0662e9ff680763c9"}, + {file = "regex-2023.12.25-cp39-cp39-win_amd64.whl", hash = "sha256:e693e233ac92ba83a87024e1d32b5f9ab15ca55ddd916d878146f4e3406b5c91"}, + {file = "regex-2023.12.25.tar.gz", hash = "sha256:29171aa128da69afdf4bde412d5bedc335f2ca8fcfe4489038577d05f16181e5"}, ] [[package]] @@ -3108,111 +3474,190 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"] [[package]] name = "rpds-py" -version = "0.13.1" +version = "0.17.1" description = "Python bindings to Rust's persistent data structures (rpds)" category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "rpds_py-0.13.1-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:83feb0f682d75a09ddc11aa37ba5c07dd9b824b22915207f6176ea458474ff75"}, - {file = 
"rpds_py-0.13.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fa84bbe22ffa108f91631935c28a623001e335d66e393438258501e618fb0dde"}, - {file = "rpds_py-0.13.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e04f8c76b8d5c70695b4e8f1d0b391d8ef91df00ef488c6c1ffb910176459bc6"}, - {file = "rpds_py-0.13.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:032c242a595629aacace44128f9795110513ad27217b091e834edec2fb09e800"}, - {file = "rpds_py-0.13.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91276caef95556faeb4b8f09fe4439670d3d6206fee78d47ddb6e6de837f0b4d"}, - {file = "rpds_py-0.13.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d22f2cb82e0b40e427a74a93c9a4231335bbc548aed79955dde0b64ea7f88146"}, - {file = "rpds_py-0.13.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63c9e2794329ef070844ff9bfc012004aeddc0468dc26970953709723f76c8a5"}, - {file = "rpds_py-0.13.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c797ea56f36c6f248656f0223b11307fdf4a1886f3555eba371f34152b07677f"}, - {file = "rpds_py-0.13.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:82dbcd6463e580bcfb7561cece35046aaabeac5a9ddb775020160b14e6c58a5d"}, - {file = "rpds_py-0.13.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:736817dbbbd030a69a1faf5413a319976c9c8ba8cdcfa98c022d3b6b2e01eca6"}, - {file = "rpds_py-0.13.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:1f36a1e80ef4ed1996445698fd91e0d3e54738bf597c9995118b92da537d7a28"}, - {file = "rpds_py-0.13.1-cp310-none-win32.whl", hash = "sha256:4f13d3f6585bd07657a603780e99beda96a36c86acaba841f131e81393958336"}, - {file = "rpds_py-0.13.1-cp310-none-win_amd64.whl", hash = "sha256:545e94c84575057d3d5c62634611858dac859702b1519b6ffc58eca7fb1adfcf"}, - {file = "rpds_py-0.13.1-cp311-cp311-macosx_10_7_x86_64.whl", hash = 
"sha256:6bfe72b249264cc1ff2f3629be240d7d2fdc778d9d298087cdec8524c91cd11f"}, - {file = "rpds_py-0.13.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edc91c50e17f5cd945d821f0f1af830522dba0c10267c3aab186dc3dbaab8def"}, - {file = "rpds_py-0.13.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2eca04a365be380ca1f8fa48b334462e19e3382c0bb7386444d8ca43aa01c481"}, - {file = "rpds_py-0.13.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3e3ac5b602fea378243f993d8b707189f9061e55ebb4e56cb9fdef8166060f28"}, - {file = "rpds_py-0.13.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dfb5d2ab183c0efe5e7b8917e4eaa2e837aacafad8a69b89aa6bc81550eed857"}, - {file = "rpds_py-0.13.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d9793d46d3e6522ae58e9321032827c9c0df1e56cbe5d3de965facb311aed6aa"}, - {file = "rpds_py-0.13.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9cd935c0220d012a27c20135c140f9cdcbc6249d5954345c81bfb714071b985c"}, - {file = "rpds_py-0.13.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:37b08df45f02ff1866043b95096cbe91ac99de05936dd09d6611987a82a3306a"}, - {file = "rpds_py-0.13.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ad666a904212aa9a6c77da7dce9d5170008cda76b7776e6731928b3f8a0d40fa"}, - {file = "rpds_py-0.13.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8a6ad8429340e0a4de89353447c6441329def3632e7b2293a7d6e873217d3c2b"}, - {file = "rpds_py-0.13.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:7c40851b659d958c5245c1236e34f0d065cc53dca8d978b49a032c8e0adfda6e"}, - {file = "rpds_py-0.13.1-cp311-none-win32.whl", hash = "sha256:4145172ab59b6c27695db6d78d040795f635cba732cead19c78cede74800949a"}, - {file = "rpds_py-0.13.1-cp311-none-win_amd64.whl", hash = "sha256:46a07a258bda12270de02b34c4884f200f864bba3dcd6e3a37fef36a168b859d"}, - {file = 
"rpds_py-0.13.1-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:ba4432301ad7eeb1b00848cf46fae0e5fecfd18a8cb5fdcf856c67985f79ecc7"}, - {file = "rpds_py-0.13.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d22e0660de24bd8e9ac82f4230a22a5fe4e397265709289d61d5fb333839ba50"}, - {file = "rpds_py-0.13.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76a8374b294e4ccb39ccaf11d39a0537ed107534139c00b4393ca3b542cc66e5"}, - {file = "rpds_py-0.13.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7d152ec7bb431040af2500e01436c9aa0d993f243346f0594a15755016bf0be1"}, - {file = "rpds_py-0.13.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:74a2044b870df7c9360bb3ce7e12f9ddf8e72e49cd3a353a1528cbf166ad2383"}, - {file = "rpds_py-0.13.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:960e7e460fda2d0af18c75585bbe0c99f90b8f09963844618a621b804f8c3abe"}, - {file = "rpds_py-0.13.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37f79f4f1f06cc96151f4a187528c3fd4a7e1065538a4af9eb68c642365957f7"}, - {file = "rpds_py-0.13.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cd4ea56c9542ad0091dfdef3e8572ae7a746e1e91eb56c9e08b8d0808b40f1d1"}, - {file = "rpds_py-0.13.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0290712eb5603a725769b5d857f7cf15cf6ca93dda3128065bbafe6fdb709beb"}, - {file = "rpds_py-0.13.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0b70c1f800059c92479dc94dda41288fd6607f741f9b1b8f89a21a86428f6383"}, - {file = "rpds_py-0.13.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3dd5fb7737224e1497c886fb3ca681c15d9c00c76171f53b3c3cc8d16ccfa7fb"}, - {file = "rpds_py-0.13.1-cp312-none-win32.whl", hash = "sha256:74be3b215a5695690a0f1a9f68b1d1c93f8caad52e23242fcb8ba56aaf060281"}, - {file = "rpds_py-0.13.1-cp312-none-win_amd64.whl", hash = 
"sha256:f47eef55297799956464efc00c74ae55c48a7b68236856d56183fe1ddf866205"}, - {file = "rpds_py-0.13.1-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:e4a45ba34f904062c63049a760790c6a2fa7a4cc4bd160d8af243b12371aaa05"}, - {file = "rpds_py-0.13.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:20147996376be452cd82cd6c17701daba69a849dc143270fa10fe067bb34562a"}, - {file = "rpds_py-0.13.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42b9535aa22ab023704cfc6533e968f7e420affe802d85e956d8a7b4c0b0b5ea"}, - {file = "rpds_py-0.13.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d4fa1eeb9bea6d9b64ac91ec51ee94cc4fc744955df5be393e1c923c920db2b0"}, - {file = "rpds_py-0.13.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2b2415d5a7b7ee96aa3a54d4775c1fec140476a17ee12353806297e900eaeddc"}, - {file = "rpds_py-0.13.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:577d40a72550eac1386b77b43836151cb61ff6700adacda2ad4d883ca5a0b6f2"}, - {file = "rpds_py-0.13.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af2d1648eb625a460eee07d3e1ea3a4a6e84a1fb3a107f6a8e95ac19f7dcce67"}, - {file = "rpds_py-0.13.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5b769396eb358d6b55dbf78f3f7ca631ca1b2fe02136faad5af74f0111b4b6b7"}, - {file = "rpds_py-0.13.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:249c8e0055ca597707d71c5ad85fd2a1c8fdb99386a8c6c257e1b47b67a9bec1"}, - {file = "rpds_py-0.13.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:fe30ef31172bdcf946502a945faad110e8fff88c32c4bec9a593df0280e64d8a"}, - {file = "rpds_py-0.13.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:2647192facf63be9ed2d7a49ceb07efe01dc6cfb083bd2cc53c418437400cb99"}, - {file = "rpds_py-0.13.1-cp38-none-win32.whl", hash = "sha256:4011d5c854aa804c833331d38a2b6f6f2fe58a90c9f615afdb7aa7cf9d31f721"}, - {file = "rpds_py-0.13.1-cp38-none-win_amd64.whl", hash = 
"sha256:7cfae77da92a20f56cf89739a557b76e5c6edc094f6ad5c090b9e15fbbfcd1a4"}, - {file = "rpds_py-0.13.1-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:e9be1f7c5f9673616f875299339984da9447a40e3aea927750c843d6e5e2e029"}, - {file = "rpds_py-0.13.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:839676475ac2ccd1532d36af3d10d290a2ca149b702ed464131e450a767550df"}, - {file = "rpds_py-0.13.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a90031658805c63fe488f8e9e7a88b260ea121ba3ee9cdabcece9c9ddb50da39"}, - {file = "rpds_py-0.13.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8ba9fbc5d6e36bfeb5292530321cc56c4ef3f98048647fabd8f57543c34174ec"}, - {file = "rpds_py-0.13.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:08832078767545c5ee12561ce980714e1e4c6619b5b1e9a10248de60cddfa1fd"}, - {file = "rpds_py-0.13.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:19f5aa7f5078d35ed8e344bcba40f35bc95f9176dddb33fc4f2084e04289fa63"}, - {file = "rpds_py-0.13.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80080972e1d000ad0341c7cc58b6855c80bd887675f92871221451d13a975072"}, - {file = "rpds_py-0.13.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:181ee352691c4434eb1c01802e9daa5edcc1007ff15023a320e2693fed6a661b"}, - {file = "rpds_py-0.13.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:d20da6b4c7aa9ee75ad0730beaba15d65157f5beeaca54a038bb968f92bf3ce3"}, - {file = "rpds_py-0.13.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:faa12a9f34671a30ea6bb027f04ec4e1fb8fa3fb3ed030893e729d4d0f3a9791"}, - {file = "rpds_py-0.13.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7cf241dbb50ea71c2e628ab2a32b5bfcd36e199152fc44e5c1edb0b773f1583e"}, - {file = "rpds_py-0.13.1-cp39-none-win32.whl", hash = "sha256:dab979662da1c9fbb464e310c0b06cb5f1d174d09a462553af78f0bfb3e01920"}, - {file = "rpds_py-0.13.1-cp39-none-win_amd64.whl", hash = 
"sha256:a2b3c79586636f1fa69a7bd59c87c15fca80c0d34b5c003d57f2f326e5276575"}, - {file = "rpds_py-0.13.1-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:5967fa631d0ed9f8511dede08bc943a9727c949d05d1efac4ac82b2938024fb7"}, - {file = "rpds_py-0.13.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:8308a8d49d1354278d5c068c888a58d7158a419b2e4d87c7839ed3641498790c"}, - {file = "rpds_py-0.13.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0580faeb9def6d0beb7aa666294d5604e569c4e24111ada423cf9936768d95c"}, - {file = "rpds_py-0.13.1-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2da81c1492291c1a90987d76a47c7b2d310661bf7c93a9de0511e27b796a8b46"}, - {file = "rpds_py-0.13.1-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1c9a1dc5e898ce30e2f9c0aa57181cddd4532b22b7780549441d6429d22d3b58"}, - {file = "rpds_py-0.13.1-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f4ae6f423cb7d1c6256b7482025ace2825728f53b7ac58bcd574de6ee9d242c2"}, - {file = "rpds_py-0.13.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc3179e0815827cf963e634095ae5715ee73a5af61defbc8d6ca79f1bdae1d1d"}, - {file = "rpds_py-0.13.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0d9f8930092558fd15c9e07198625efb698f7cc00b3dc311c83eeec2540226a8"}, - {file = "rpds_py-0.13.1-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:d1d388d2f5f5a6065cf83c54dd12112b7389095669ff395e632003ae8999c6b8"}, - {file = "rpds_py-0.13.1-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:08b335fb0c45f0a9e2478a9ece6a1bfb00b6f4c4780f9be3cf36479c5d8dd374"}, - {file = "rpds_py-0.13.1-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:d11afdc5992bbd7af60ed5eb519873690d921425299f51d80aa3099ed49f2bcc"}, - {file = "rpds_py-0.13.1-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = 
"sha256:8c1f6c8df23be165eb0cb78f305483d00c6827a191e3a38394c658d5b9c80bbd"}, - {file = "rpds_py-0.13.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:528e2afaa56d815d2601b857644aeb395afe7e59212ab0659906dc29ae68d9a6"}, - {file = "rpds_py-0.13.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df2af1180b8eeececf4f819d22cc0668bfadadfd038b19a90bd2fb2ee419ec6f"}, - {file = "rpds_py-0.13.1-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:88956c993a20201744282362e3fd30962a9d86dc4f1dcf2bdb31fab27821b61f"}, - {file = "rpds_py-0.13.1-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee70ee5f4144a45a9e6169000b5b525d82673d5dab9f7587eccc92794814e7ac"}, - {file = "rpds_py-0.13.1-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c5fd099acaee2325f01281a130a39da08d885e4dedf01b84bf156ec2737d78fe"}, - {file = "rpds_py-0.13.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9656a09653b18b80764647d585750df2dff8928e03a706763ab40ec8c4872acc"}, - {file = "rpds_py-0.13.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7ba239bb37663b2b4cd08e703e79e13321512dccd8e5f0e9451d9e53a6b8509a"}, - {file = "rpds_py-0.13.1-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:3f55ae773abd96b1de25fc5c3fb356f491bd19116f8f854ba705beffc1ddc3c5"}, - {file = "rpds_py-0.13.1-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:f4b15a163448ec79241fb2f1bc5a8ae1a4a304f7a48d948d208a2935b26bf8a5"}, - {file = "rpds_py-0.13.1-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:1a3b2583c86bbfbf417304eeb13400ce7f8725376dc7d3efbf35dc5d7052ad48"}, - {file = "rpds_py-0.13.1-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:f1059ca9a51c936c9a8d46fbc2c9a6b4c15ab3f13a97f1ad32f024b39666ba85"}, - {file = "rpds_py-0.13.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = 
"sha256:f55601fb58f92e4f4f1d05d80c24cb77505dc42103ddfd63ddfdc51d3da46fa2"}, - {file = "rpds_py-0.13.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fcfd5f91b882eedf8d9601bd21261d6ce0e61a8c66a7152d1f5df08d3f643ab1"}, - {file = "rpds_py-0.13.1-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6574f619e8734140d96c59bfa8a6a6e7a3336820ccd1bfd95ffa610673b650a2"}, - {file = "rpds_py-0.13.1-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a4b9d3f5c48bbe8d9e3758e498b3c34863f2c9b1ac57a4e6310183740e59c980"}, - {file = "rpds_py-0.13.1-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cdd6f8738e1f1d9df5b1603bb03cb30e442710e5672262b95d0f9fcb4edb0dab"}, - {file = "rpds_py-0.13.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8c2bf286e5d755a075e5e97ba56b3de08cccdad6b323ab0b21cc98875176b03"}, - {file = "rpds_py-0.13.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b3d4b390ee70ca9263b331ccfaf9819ee20e90dfd0201a295e23eb64a005dbef"}, - {file = "rpds_py-0.13.1-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:db8d0f0ad92f74feb61c4e4a71f1d573ef37c22ef4dc19cab93e501bfdad8cbd"}, - {file = "rpds_py-0.13.1-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:2abd669a39be69cdfe145927c7eb53a875b157740bf1e2d49e9619fc6f43362e"}, - {file = "rpds_py-0.13.1-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:2c173f529666bab8e3f948b74c6d91afa22ea147e6ebae49a48229d9020a47c4"}, - {file = "rpds_py-0.13.1.tar.gz", hash = "sha256:264f3a5906c62b9df3a00ad35f6da1987d321a053895bd85f9d5c708de5c0fbf"}, + {file = "rpds_py-0.17.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:4128980a14ed805e1b91a7ed551250282a8ddf8201a4e9f8f5b7e6225f54170d"}, + {file = "rpds_py-0.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ff1dcb8e8bc2261a088821b2595ef031c91d499a0c1b031c152d43fe0a6ecec8"}, + {file = 
"rpds_py-0.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d65e6b4f1443048eb7e833c2accb4fa7ee67cc7d54f31b4f0555b474758bee55"}, + {file = "rpds_py-0.17.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a71169d505af63bb4d20d23a8fbd4c6ce272e7bce6cc31f617152aa784436f29"}, + {file = "rpds_py-0.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:436474f17733c7dca0fbf096d36ae65277e8645039df12a0fa52445ca494729d"}, + {file = "rpds_py-0.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:10162fe3f5f47c37ebf6d8ff5a2368508fe22007e3077bf25b9c7d803454d921"}, + {file = "rpds_py-0.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:720215373a280f78a1814becb1312d4e4d1077b1202a56d2b0815e95ccb99ce9"}, + {file = "rpds_py-0.17.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:70fcc6c2906cfa5c6a552ba7ae2ce64b6c32f437d8f3f8eea49925b278a61453"}, + {file = "rpds_py-0.17.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:91e5a8200e65aaac342a791272c564dffcf1281abd635d304d6c4e6b495f29dc"}, + {file = "rpds_py-0.17.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:99f567dae93e10be2daaa896e07513dd4bf9c2ecf0576e0533ac36ba3b1d5394"}, + {file = "rpds_py-0.17.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:24e4900a6643f87058a27320f81336d527ccfe503984528edde4bb660c8c8d59"}, + {file = "rpds_py-0.17.1-cp310-none-win32.whl", hash = "sha256:0bfb09bf41fe7c51413f563373e5f537eaa653d7adc4830399d4e9bdc199959d"}, + {file = "rpds_py-0.17.1-cp310-none-win_amd64.whl", hash = "sha256:20de7b7179e2031a04042e85dc463a93a82bc177eeba5ddd13ff746325558aa6"}, + {file = "rpds_py-0.17.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:65dcf105c1943cba45d19207ef51b8bc46d232a381e94dd38719d52d3980015b"}, + {file = "rpds_py-0.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:01f58a7306b64e0a4fe042047dd2b7d411ee82e54240284bab63e325762c1147"}, + {file = "rpds_py-0.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:071bc28c589b86bc6351a339114fb7a029f5cddbaca34103aa573eba7b482382"}, + {file = "rpds_py-0.17.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ae35e8e6801c5ab071b992cb2da958eee76340e6926ec693b5ff7d6381441745"}, + {file = "rpds_py-0.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:149c5cd24f729e3567b56e1795f74577aa3126c14c11e457bec1b1c90d212e38"}, + {file = "rpds_py-0.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e796051f2070f47230c745d0a77a91088fbee2cc0502e9b796b9c6471983718c"}, + {file = "rpds_py-0.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:60e820ee1004327609b28db8307acc27f5f2e9a0b185b2064c5f23e815f248f8"}, + {file = "rpds_py-0.17.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1957a2ab607f9added64478a6982742eb29f109d89d065fa44e01691a20fc20a"}, + {file = "rpds_py-0.17.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8587fd64c2a91c33cdc39d0cebdaf30e79491cc029a37fcd458ba863f8815383"}, + {file = "rpds_py-0.17.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4dc889a9d8a34758d0fcc9ac86adb97bab3fb7f0c4d29794357eb147536483fd"}, + {file = "rpds_py-0.17.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:2953937f83820376b5979318840f3ee47477d94c17b940fe31d9458d79ae7eea"}, + {file = "rpds_py-0.17.1-cp311-none-win32.whl", hash = "sha256:1bfcad3109c1e5ba3cbe2f421614e70439f72897515a96c462ea657261b96518"}, + {file = "rpds_py-0.17.1-cp311-none-win_amd64.whl", hash = "sha256:99da0a4686ada4ed0f778120a0ea8d066de1a0a92ab0d13ae68492a437db78bf"}, + {file = "rpds_py-0.17.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:1dc29db3900cb1bb40353772417800f29c3d078dbc8024fd64655a04ee3c4bdf"}, + {file = 
"rpds_py-0.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:82ada4a8ed9e82e443fcef87e22a3eed3654dd3adf6e3b3a0deb70f03e86142a"}, + {file = "rpds_py-0.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d36b2b59e8cc6e576f8f7b671e32f2ff43153f0ad6d0201250a7c07f25d570e"}, + {file = "rpds_py-0.17.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3677fcca7fb728c86a78660c7fb1b07b69b281964673f486ae72860e13f512ad"}, + {file = "rpds_py-0.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:516fb8c77805159e97a689e2f1c80655c7658f5af601c34ffdb916605598cda2"}, + {file = "rpds_py-0.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:df3b6f45ba4515632c5064e35ca7f31d51d13d1479673185ba8f9fefbbed58b9"}, + {file = "rpds_py-0.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a967dd6afda7715d911c25a6ba1517975acd8d1092b2f326718725461a3d33f9"}, + {file = "rpds_py-0.17.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:dbbb95e6fc91ea3102505d111b327004d1c4ce98d56a4a02e82cd451f9f57140"}, + {file = "rpds_py-0.17.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:02866e060219514940342a1f84303a1ef7a1dad0ac311792fbbe19b521b489d2"}, + {file = "rpds_py-0.17.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:2528ff96d09f12e638695f3a2e0c609c7b84c6df7c5ae9bfeb9252b6fa686253"}, + {file = "rpds_py-0.17.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:bd345a13ce06e94c753dab52f8e71e5252aec1e4f8022d24d56decd31e1b9b23"}, + {file = "rpds_py-0.17.1-cp312-none-win32.whl", hash = "sha256:2a792b2e1d3038daa83fa474d559acfd6dc1e3650ee93b2662ddc17dbff20ad1"}, + {file = "rpds_py-0.17.1-cp312-none-win_amd64.whl", hash = "sha256:292f7344a3301802e7c25c53792fae7d1593cb0e50964e7bcdcc5cf533d634e3"}, + {file = "rpds_py-0.17.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = 
"sha256:8ffe53e1d8ef2520ebcf0c9fec15bb721da59e8ef283b6ff3079613b1e30513d"}, + {file = "rpds_py-0.17.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4341bd7579611cf50e7b20bb8c2e23512a3dc79de987a1f411cb458ab670eb90"}, + {file = "rpds_py-0.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f4eb548daf4836e3b2c662033bfbfc551db58d30fd8fe660314f86bf8510b93"}, + {file = "rpds_py-0.17.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b686f25377f9c006acbac63f61614416a6317133ab7fafe5de5f7dc8a06d42eb"}, + {file = "rpds_py-0.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4e21b76075c01d65d0f0f34302b5a7457d95721d5e0667aea65e5bb3ab415c25"}, + {file = "rpds_py-0.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b86b21b348f7e5485fae740d845c65a880f5d1eda1e063bc59bef92d1f7d0c55"}, + {file = "rpds_py-0.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f175e95a197f6a4059b50757a3dca33b32b61691bdbd22c29e8a8d21d3914cae"}, + {file = "rpds_py-0.17.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1701fc54460ae2e5efc1dd6350eafd7a760f516df8dbe51d4a1c79d69472fbd4"}, + {file = "rpds_py-0.17.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:9051e3d2af8f55b42061603e29e744724cb5f65b128a491446cc029b3e2ea896"}, + {file = "rpds_py-0.17.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:7450dbd659fed6dd41d1a7d47ed767e893ba402af8ae664c157c255ec6067fde"}, + {file = "rpds_py-0.17.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:5a024fa96d541fd7edaa0e9d904601c6445e95a729a2900c5aec6555fe921ed6"}, + {file = "rpds_py-0.17.1-cp38-none-win32.whl", hash = "sha256:da1ead63368c04a9bded7904757dfcae01eba0e0f9bc41d3d7f57ebf1c04015a"}, + {file = "rpds_py-0.17.1-cp38-none-win_amd64.whl", hash = "sha256:841320e1841bb53fada91c9725e766bb25009cfd4144e92298db296fb6c894fb"}, + {file = "rpds_py-0.17.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = 
"sha256:f6c43b6f97209e370124baf2bf40bb1e8edc25311a158867eb1c3a5d449ebc7a"}, + {file = "rpds_py-0.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5e7d63ec01fe7c76c2dbb7e972fece45acbb8836e72682bde138e7e039906e2c"}, + {file = "rpds_py-0.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81038ff87a4e04c22e1d81f947c6ac46f122e0c80460b9006e6517c4d842a6ec"}, + {file = "rpds_py-0.17.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:810685321f4a304b2b55577c915bece4c4a06dfe38f6e62d9cc1d6ca8ee86b99"}, + {file = "rpds_py-0.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:25f071737dae674ca8937a73d0f43f5a52e92c2d178330b4c0bb6ab05586ffa6"}, + {file = "rpds_py-0.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aa5bfb13f1e89151ade0eb812f7b0d7a4d643406caaad65ce1cbabe0a66d695f"}, + {file = "rpds_py-0.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dfe07308b311a8293a0d5ef4e61411c5c20f682db6b5e73de6c7c8824272c256"}, + {file = "rpds_py-0.17.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a000133a90eea274a6f28adc3084643263b1e7c1a5a66eb0a0a7a36aa757ed74"}, + {file = "rpds_py-0.17.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5d0e8a6434a3fbf77d11448c9c25b2f25244226cfbec1a5159947cac5b8c5fa4"}, + {file = "rpds_py-0.17.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:efa767c220d94aa4ac3a6dd3aeb986e9f229eaf5bce92d8b1b3018d06bed3772"}, + {file = "rpds_py-0.17.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:dbc56680ecf585a384fbd93cd42bc82668b77cb525343170a2d86dafaed2a84b"}, + {file = "rpds_py-0.17.1-cp39-none-win32.whl", hash = "sha256:270987bc22e7e5a962b1094953ae901395e8c1e1e83ad016c5cfcfff75a15a3f"}, + {file = "rpds_py-0.17.1-cp39-none-win_amd64.whl", hash = "sha256:2a7b2f2f56a16a6d62e55354dd329d929560442bd92e87397b7a9586a32e3e76"}, + {file = "rpds_py-0.17.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash 
= "sha256:a3264e3e858de4fc601741498215835ff324ff2482fd4e4af61b46512dd7fc83"}, + {file = "rpds_py-0.17.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:f2f3b28b40fddcb6c1f1f6c88c6f3769cd933fa493ceb79da45968a21dccc920"}, + {file = "rpds_py-0.17.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9584f8f52010295a4a417221861df9bea4c72d9632562b6e59b3c7b87a1522b7"}, + {file = "rpds_py-0.17.1-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c64602e8be701c6cfe42064b71c84ce62ce66ddc6422c15463fd8127db3d8066"}, + {file = "rpds_py-0.17.1-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:060f412230d5f19fc8c8b75f315931b408d8ebf56aec33ef4168d1b9e54200b1"}, + {file = "rpds_py-0.17.1-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b9412abdf0ba70faa6e2ee6c0cc62a8defb772e78860cef419865917d86c7342"}, + {file = "rpds_py-0.17.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9737bdaa0ad33d34c0efc718741abaafce62fadae72c8b251df9b0c823c63b22"}, + {file = "rpds_py-0.17.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9f0e4dc0f17dcea4ab9d13ac5c666b6b5337042b4d8f27e01b70fae41dd65c57"}, + {file = "rpds_py-0.17.1-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:1db228102ab9d1ff4c64148c96320d0be7044fa28bd865a9ce628ce98da5973d"}, + {file = "rpds_py-0.17.1-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:d8bbd8e56f3ba25a7d0cf980fc42b34028848a53a0e36c9918550e0280b9d0b6"}, + {file = "rpds_py-0.17.1-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:be22ae34d68544df293152b7e50895ba70d2a833ad9566932d750d3625918b82"}, + {file = "rpds_py-0.17.1-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:bf046179d011e6114daf12a534d874958b039342b347348a78b7cdf0dd9d6041"}, + {file = "rpds_py-0.17.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = 
"sha256:1a746a6d49665058a5896000e8d9d2f1a6acba8a03b389c1e4c06e11e0b7f40d"}, + {file = "rpds_py-0.17.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0b8bf5b8db49d8fd40f54772a1dcf262e8be0ad2ab0206b5a2ec109c176c0a4"}, + {file = "rpds_py-0.17.1-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f7f4cb1f173385e8a39c29510dd11a78bf44e360fb75610594973f5ea141028b"}, + {file = "rpds_py-0.17.1-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7fbd70cb8b54fe745301921b0816c08b6d917593429dfc437fd024b5ba713c58"}, + {file = "rpds_py-0.17.1-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9bdf1303df671179eaf2cb41e8515a07fc78d9d00f111eadbe3e14262f59c3d0"}, + {file = "rpds_py-0.17.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fad059a4bd14c45776600d223ec194e77db6c20255578bb5bcdd7c18fd169361"}, + {file = "rpds_py-0.17.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3664d126d3388a887db44c2e293f87d500c4184ec43d5d14d2d2babdb4c64cad"}, + {file = "rpds_py-0.17.1-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:698ea95a60c8b16b58be9d854c9f993c639f5c214cf9ba782eca53a8789d6b19"}, + {file = "rpds_py-0.17.1-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:c3d2010656999b63e628a3c694f23020322b4178c450dc478558a2b6ef3cb9bb"}, + {file = "rpds_py-0.17.1-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:938eab7323a736533f015e6069a7d53ef2dcc841e4e533b782c2bfb9fb12d84b"}, + {file = "rpds_py-0.17.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:1e626b365293a2142a62b9a614e1f8e331b28f3ca57b9f05ebbf4cf2a0f0bdc5"}, + {file = "rpds_py-0.17.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:380e0df2e9d5d5d339803cfc6d183a5442ad7ab3c63c2a0982e8c824566c5ccc"}, + {file = "rpds_py-0.17.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:b760a56e080a826c2e5af09002c1a037382ed21d03134eb6294812dda268c811"}, + {file = "rpds_py-0.17.1-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5576ee2f3a309d2bb403ec292d5958ce03953b0e57a11d224c1f134feaf8c40f"}, + {file = "rpds_py-0.17.1-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1f3c3461ebb4c4f1bbc70b15d20b565759f97a5aaf13af811fcefc892e9197ba"}, + {file = "rpds_py-0.17.1-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:637b802f3f069a64436d432117a7e58fab414b4e27a7e81049817ae94de45d8d"}, + {file = "rpds_py-0.17.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffee088ea9b593cc6160518ba9bd319b5475e5f3e578e4552d63818773c6f56a"}, + {file = "rpds_py-0.17.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3ac732390d529d8469b831949c78085b034bff67f584559340008d0f6041a049"}, + {file = "rpds_py-0.17.1-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:93432e747fb07fa567ad9cc7aaadd6e29710e515aabf939dfbed8046041346c6"}, + {file = "rpds_py-0.17.1-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:7b7d9ca34542099b4e185b3c2a2b2eda2e318a7dbde0b0d83357a6d4421b5296"}, + {file = "rpds_py-0.17.1-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:0387ce69ba06e43df54e43968090f3626e231e4bc9150e4c3246947567695f68"}, + {file = "rpds_py-0.17.1.tar.gz", hash = "sha256:0210b2668f24c078307260bf88bdac9d6f1093635df5123789bfee4d8d7fc8e7"}, +] + +[[package]] +name = "ruamel-yaml" +version = "0.18.5" +description = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "ruamel.yaml-0.18.5-py3-none-any.whl", hash = "sha256:a013ac02f99a69cdd6277d9664689eb1acba07069f912823177c5eced21a6ada"}, + {file = "ruamel.yaml-0.18.5.tar.gz", hash = 
"sha256:61917e3a35a569c1133a8f772e1226961bf5a1198bea7e23f06a0841dea1ab0e"}, +] + +[package.dependencies] +"ruamel.yaml.clib" = {version = ">=0.2.7", markers = "platform_python_implementation == \"CPython\" and python_version < \"3.13\""} + +[package.extras] +docs = ["mercurial (>5.7)", "ryd"] +jinja2 = ["ruamel.yaml.jinja2 (>=0.2)"] + +[[package]] +name = "ruamel-yaml-clib" +version = "0.2.8" +description = "C version of reader, parser and emitter for ruamel.yaml derived from libyaml" +category = "main" +optional = false +python-versions = ">=3.6" +files = [ + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b42169467c42b692c19cf539c38d4602069d8c1505e97b86387fcf7afb766e1d"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:07238db9cbdf8fc1e9de2489a4f68474e70dffcb32232db7c08fa61ca0c7c462"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:fff3573c2db359f091e1589c3d7c5fc2f86f5bdb6f24252c2d8e539d4e45f412"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux_2_24_aarch64.whl", hash = "sha256:aa2267c6a303eb483de8d02db2871afb5c5fc15618d894300b88958f729ad74f"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:840f0c7f194986a63d2c2465ca63af8ccbbc90ab1c6001b1978f05119b5e7334"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:024cfe1fc7c7f4e1aff4a81e718109e13409767e4f871443cbff3dba3578203d"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-win32.whl", hash = "sha256:c69212f63169ec1cfc9bb44723bf2917cbbd8f6191a00ef3410f5a7fe300722d"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-win_amd64.whl", hash = "sha256:cabddb8d8ead485e255fe80429f833172b4cadf99274db39abc080e068cbcc31"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:bef08cd86169d9eafb3ccb0a39edb11d8e25f3dae2b28f5c52fd997521133069"}, + {file = 
"ruamel.yaml.clib-0.2.8-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:b16420e621d26fdfa949a8b4b47ade8810c56002f5389970db4ddda51dbff248"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:25c515e350e5b739842fc3228d662413ef28f295791af5e5110b543cf0b57d9b"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux_2_24_aarch64.whl", hash = "sha256:1707814f0d9791df063f8c19bb51b0d1278b8e9a2353abbb676c2f685dee6afe"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:46d378daaac94f454b3a0e3d8d78cafd78a026b1d71443f4966c696b48a6d899"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:09b055c05697b38ecacb7ac50bdab2240bfca1a0c4872b0fd309bb07dc9aa3a9"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-win32.whl", hash = "sha256:53a300ed9cea38cf5a2a9b069058137c2ca1ce658a874b79baceb8f892f915a7"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-win_amd64.whl", hash = "sha256:c2a72e9109ea74e511e29032f3b670835f8a59bbdc9ce692c5b4ed91ccf1eedb"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:ebc06178e8821efc9692ea7544aa5644217358490145629914d8020042c24aa1"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-macosx_13_0_arm64.whl", hash = "sha256:edaef1c1200c4b4cb914583150dcaa3bc30e592e907c01117c08b13a07255ec2"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d176b57452ab5b7028ac47e7b3cf644bcfdc8cacfecf7e71759f7f51a59e5c92"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux_2_24_aarch64.whl", hash = "sha256:1dc67314e7e1086c9fdf2680b7b6c2be1c0d8e3a8279f2e993ca2a7545fecf62"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3213ece08ea033eb159ac52ae052a4899b56ecc124bb80020d9bbceeb50258e9"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:aab7fd643f71d7946f2ee58cc88c9b7bfc97debd71dcc93e03e2d174628e7e2d"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-win32.whl", hash = "sha256:5c365d91c88390c8d0a8545df0b5857172824b1c604e867161e6b3d59a827eaa"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-win_amd64.whl", hash = "sha256:1758ce7d8e1a29d23de54a16ae867abd370f01b5a69e1a3ba75223eaa3ca1a1b"}, + {file = "ruamel.yaml.clib-0.2.8-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a5aa27bad2bb83670b71683aae140a1f52b0857a2deff56ad3f6c13a017a26ed"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c58ecd827313af6864893e7af0a3bb85fd529f862b6adbefe14643947cfe2942"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-macosx_12_0_arm64.whl", hash = "sha256:f481f16baec5290e45aebdc2a5168ebc6d35189ae6fea7a58787613a25f6e875"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-manylinux_2_24_aarch64.whl", hash = "sha256:77159f5d5b5c14f7c34073862a6b7d34944075d9f93e681638f6d753606c6ce6"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7f67a1ee819dc4562d444bbafb135832b0b909f81cc90f7aa00260968c9ca1b3"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:4ecbf9c3e19f9562c7fdd462e8d18dd902a47ca046a2e64dba80699f0b6c09b7"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:87ea5ff66d8064301a154b3933ae406b0863402a799b16e4a1d24d9fbbcbe0d3"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-win32.whl", hash = "sha256:75e1ed13e1f9de23c5607fe6bd1aeaae21e523b32d83bb33918245361e9cc51b"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-win_amd64.whl", hash = "sha256:3f215c5daf6a9d7bbed4a0a4f760f3113b10e82ff4c5c44bec20a68c8014f675"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1b617618914cb00bf5c34d4357c37aa15183fa229b24767259657746c9077615"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-macosx_12_0_arm64.whl", hash = 
"sha256:a6a9ffd280b71ad062eae53ac1659ad86a17f59a0fdc7699fd9be40525153337"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-manylinux_2_24_aarch64.whl", hash = "sha256:305889baa4043a09e5b76f8e2a51d4ffba44259f6b4c72dec8ca56207d9c6fe1"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:700e4ebb569e59e16a976857c8798aee258dceac7c7d6b50cab63e080058df91"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:e2b4c44b60eadec492926a7270abb100ef9f72798e18743939bdbf037aab8c28"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e79e5db08739731b0ce4850bed599235d601701d5694c36570a99a0c5ca41a9d"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-win32.whl", hash = "sha256:955eae71ac26c1ab35924203fda6220f84dce57d6d7884f189743e2abe3a9fbe"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-win_amd64.whl", hash = "sha256:56f4252222c067b4ce51ae12cbac231bce32aee1d33fbfc9d17e5b8d6966c312"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:03d1162b6d1df1caa3a4bd27aa51ce17c9afc2046c31b0ad60a0a96ec22f8001"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:bba64af9fa9cebe325a62fa398760f5c7206b215201b0ec825005f1b18b9bccf"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-manylinux_2_24_aarch64.whl", hash = "sha256:a1a45e0bb052edf6a1d3a93baef85319733a888363938e1fc9924cb00c8df24c"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:da09ad1c359a728e112d60116f626cc9f29730ff3e0e7db72b9a2dbc2e4beed5"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:184565012b60405d93838167f425713180b949e9d8dd0bbc7b49f074407c5a8b"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a75879bacf2c987c003368cf14bed0ffe99e8e85acfa6c0bfffc21a090f16880"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-win32.whl", hash = 
"sha256:84b554931e932c46f94ab306913ad7e11bba988104c5cff26d90d03f68258cd5"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-win_amd64.whl", hash = "sha256:25ac8c08322002b06fa1d49d1646181f0b2c72f5cbc15a85e80b4c30a544bb15"}, + {file = "ruamel.yaml.clib-0.2.8.tar.gz", hash = "sha256:beb2e0404003de9a4cab9753a8805a8fe9320ee6673136ed7f04255fe60bb512"}, ] [[package]] @@ -3243,14 +3688,14 @@ paramiko = "*" [[package]] name = "setuptools" -version = "69.0.0" +version = "69.0.3" description = "Easily download, build, install, upgrade, and uninstall Python packages" category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.0.0-py3-none-any.whl", hash = "sha256:eb03b43f23910c5fd0909cb677ad017cd9531f493d27f8b3f5316ff1fb07390e"}, - {file = "setuptools-69.0.0.tar.gz", hash = "sha256:4c65d4f7891e5b046e9146913b87098144de2ca2128fbc10135b8556a6ddd946"}, + {file = "setuptools-69.0.3-py3-none-any.whl", hash = "sha256:385eb4edd9c9d5c17540511303e39a147ce2fc04bc55289c322b9e5904fe2c05"}, + {file = "setuptools-69.0.3.tar.gz", hash = "sha256:be1af57fc409f93647f2e8e4573a142ed38724b8cdd389706a867bb4efcf1e78"}, ] [package.extras] @@ -3298,6 +3743,18 @@ files = [ {file = "smmap-5.0.1.tar.gz", hash = "sha256:dceeb6c0028fdb6734471eb07c0cd2aae706ccaecab45965ee83f11c8d3b1f62"}, ] +[[package]] +name = "sniffio" +version = "1.3.0" +description = "Sniff out which async library your code is running under" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"}, + {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"}, +] + [[package]] name = "snowballstemmer" version = "2.2.0" @@ -3328,14 +3785,14 @@ social-auth-core = ">=4.4.1" [[package]] name = "social-auth-core" -version = "4.5.0" +version = "4.5.1" description = "Python social authentication made simple." 
category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "social-auth-core-4.5.0.tar.gz", hash = "sha256:3d4154f45c0bacffe54ccf4361bce7e66cf5f5cd1bb0ebb7507ad09a1b07d9d9"}, - {file = "social_auth_core-4.5.0-py3-none-any.whl", hash = "sha256:f4ae5d8e503a401f319498bcad59fd1f6c473517eeae89c22299250f63c33365"}, + {file = "social-auth-core-4.5.1.tar.gz", hash = "sha256:307a4ba64d4f3ec86e4389163eac1d8b8656ffe5ab2e964aeff043ab00b3a662"}, + {file = "social_auth_core-4.5.1-py3-none-any.whl", hash = "sha256:54d0c598bf6ea0ec12bbcf78bee035c7cd604b5d781d80b7997e9e033c3ac05d"}, ] [package.dependencies] @@ -3385,6 +3842,24 @@ files = [ [package.dependencies] pbr = ">=2.0.0,<2.1.0 || >2.1.0" +[[package]] +name = "structlog" +version = "22.3.0" +description = "Structured Logging for Python" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "structlog-22.3.0-py3-none-any.whl", hash = "sha256:b403f344f902b220648fa9f286a23c0cc5439a5844d271fec40562dbadbc70ad"}, + {file = "structlog-22.3.0.tar.gz", hash = "sha256:e7509391f215e4afb88b1b80fa3ea074be57a5a17d794bd436a5c949da023333"}, +] + +[package.extras] +dev = ["structlog[docs,tests,typing]"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-mermaid", "twisted"] +tests = ["coverage[toml]", "freezegun (>=0.2.8)", "pretend", "pytest (>=6.0)", "pytest-asyncio (>=0.17)", "simplejson"] +typing = ["mypy", "rich", "twisted"] + [[package]] name = "svgwrite" version = "1.4.3" @@ -3498,14 +3973,14 @@ full = ["cerberus (>=1.3.0,<1.4.0)", "deepdiff (>=5.8.0,<5.9.0)", "jinja2 (>=3.0 [[package]] name = "ttp-templates" -version = "0.3.5" +version = "0.3.6" description = "Template Text Parser Templates collections" category = "main" optional = false python-versions = ">=3.6,<4.0" files = [ - {file = "ttp_templates-0.3.5-py3-none-any.whl", hash = "sha256:4985a68640468127a0e31021672039cd88a8b9c3dd9289cad67839209cddaf30"}, - {file = "ttp_templates-0.3.5.tar.gz", hash 
= "sha256:e59870d4f65bd4aaf89178dc9065a7db8b80a23d5d79b5d6ffd041312d5ec5a6"}, + {file = "ttp_templates-0.3.6-py3-none-any.whl", hash = "sha256:a126a70b23c428b541f458400eac98582894937ef3825c423d08d4384b4867ae"}, + {file = "ttp_templates-0.3.6.tar.gz", hash = "sha256:f05ac74e9ed75726fab347a144493ca7ffcd3ef298bc54fc09e2fce83c42916d"}, ] [package.dependencies] @@ -3516,26 +3991,26 @@ docs = ["mkdocs (==1.2.4)", "mkdocs-material (==7.2.2)", "mkdocs-material-extens [[package]] name = "typing-extensions" -version = "4.8.0" +version = "4.9.0" description = "Backported and Experimental Type Hints for Python 3.8+" category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.8.0-py3-none-any.whl", hash = "sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0"}, - {file = "typing_extensions-4.8.0.tar.gz", hash = "sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef"}, + {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, + {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, ] [[package]] name = "tzdata" -version = "2023.3" +version = "2023.4" description = "Provider of IANA time zone data" category = "main" optional = false python-versions = ">=2" files = [ - {file = "tzdata-2023.3-py2.py3-none-any.whl", hash = "sha256:7e65763eef3120314099b6939b5546db7adce1e7d6f2e179e3df563c70511eda"}, - {file = "tzdata-2023.3.tar.gz", hash = "sha256:11ef1e08e54acb0d4f95bdb1be05da659673de4acbd21bf9c69e94cc5e907a3a"}, + {file = "tzdata-2023.4-py2.py3-none-any.whl", hash = "sha256:aa3ace4329eeacda5b7beb7ea08ece826c28d761cda36e747cfbf97996d39bf3"}, + {file = "tzdata-2023.4.tar.gz", hash = "sha256:dd54c94f294765522c77399649b4fefd95522479a664a0cec87f41bebc6148c9"}, ] [[package]] @@ -3552,20 +4027,20 @@ files = [ [[package]] name = "urllib3" -version = "2.1.0" +version 
= "1.26.18" description = "HTTP library with thread-safe connection pooling, file post, and more." category = "main" optional = false -python-versions = ">=3.8" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ - {file = "urllib3-2.1.0-py3-none-any.whl", hash = "sha256:55901e917a5896a349ff771be919f8bd99aff50b79fe58fec595eb37bbc56bb3"}, - {file = "urllib3-2.1.0.tar.gz", hash = "sha256:df7aa8afb0148fa78488e7899b2c59b5f4ffcfa82e6c54ccb9dd37c1d7b52d54"}, + {file = "urllib3-1.26.18-py2.py3-none-any.whl", hash = "sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07"}, + {file = "urllib3-1.26.18.tar.gz", hash = "sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0"}, ] [package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] -socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] -zstd = ["zstandard (>=0.18.0)"] +brotli = ["brotli (==1.0.9)", "brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] +secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] +socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] name = "vine" @@ -3621,14 +4096,14 @@ watchmedo = ["PyYAML (>=3.10)"] [[package]] name = "wcwidth" -version = "0.2.10" +version = "0.2.13" description = "Measures the displayed width of unicode strings in a terminal" category = "main" optional = false python-versions = "*" files = [ - {file = "wcwidth-0.2.10-py2.py3-none-any.whl", hash = "sha256:aec5179002dd0f0d40c456026e74a729661c9d468e1ed64405e3a6c2176ca36f"}, - {file = "wcwidth-0.2.10.tar.gz", hash = "sha256:390c7454101092a6a5e43baad8f83de615463af459201709556b6e4b1c861f97"}, + {file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"}, + {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"}, ] [[package]] @@ -3764,4 +4239,4 @@ 
testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = ">=3.8,<3.12" -content-hash = "3eeabf46ff196a3c23659ada1fe5d50b0bd1a788f9c858f3a475d39b7e12bd80" +content-hash = "e94baa051993eec15b367e1a37faa54fb1099c3a96bd29d3841e2123d3414eed" From d2c745592b90c4df4f62abf3c69a7884dd5de0a9 Mon Sep 17 00:00:00 2001 From: Jeff Kala Date: Fri, 19 Jan 2024 22:50:01 -0600 Subject: [PATCH 009/225] create new logger, processor, and process results for return data --- nautobot_device_onboarding/jobs.py | 60 +++++++--------- .../nornir_plays/command_getter.py | 2 +- .../nornir_plays/logger.py | 46 +++++++++++++ .../nornir_plays/processor.py | 69 +++++++++++++++++++ 4 files changed, 142 insertions(+), 35 deletions(-) create mode 100755 nautobot_device_onboarding/nornir_plays/logger.py create mode 100755 nautobot_device_onboarding/nornir_plays/processor.py diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py index a10eb5eb..bda4d409 100644 --- a/nautobot_device_onboarding/jobs.py +++ b/nautobot_device_onboarding/jobs.py @@ -1,4 +1,6 @@ """Device Onboarding Jobs.""" +import logging + from diffsync.enum import DiffSyncFlags from django.conf import settings from django.templatetags.static import static @@ -21,6 +23,8 @@ from nautobot_device_onboarding.netdev_keeper import NetdevKeeper from nautobot_device_onboarding.nornir_plays.command_getter import netmiko_send_commands from nautobot_device_onboarding.nornir_plays.empty_inventory import EmptyInventory +from nautobot_device_onboarding.nornir_plays.logger import NornirLogger +from nautobot_device_onboarding.nornir_plays.processor import ProcessorDO from nautobot_device_onboarding.utils.inventory_creator import _set_inventory from nautobot_ssot.jobs.base import DataSource from nornir import InitNornir @@ -33,6 +37,9 @@ PLUGIN_SETTINGS = settings.PLUGINS_CONFIG["nautobot_device_onboarding"] +NORNIR_SETTINGS = 
settings.PLUGINS_CONFIG["nautobot_plugin_nornir"] + +LOGGER = logging.getLogger(__name__) name = "Device Onboarding/Network Importer" @@ -336,10 +343,11 @@ def __init__(self, *args, **kwargs): def run(self): mock_job_data = { - "ip4address": "174.51.52.76,10.1.1.1", + "ip4address": "10.1.1.8", "platform": "cisco_ios", - "secrets_group": SecretsGroup.objects.get(name="Cisco Devices"), - "port": 8922, + # "secrets_group": SecretsGroup.objects.get(name="Cisco Devices"), + "secrets_group": None, + "port": 22, "timeout": 30, } @@ -352,45 +360,29 @@ def run(self): # Initiate Nornir instance with empty inventory try: - with InitNornir(inventory={"plugin": "empty-inventory"}) as nr: + logger = NornirLogger(self.job_result, log_level=0) + compiled_results = {} + with InitNornir( + runner=NORNIR_SETTINGS.get("runner"), + logging={"enabled": False}, + inventory={"plugin": "empty-inventory",}, + ) as nornir_obj: + nr_with_processors = nornir_obj.with_processors([ProcessorDO(logger, compiled_results)]) ip_address = mock_job_data["ip4address"].split(",") self.platform = mock_job_data.get("platform", None) inventory_constructed = _set_inventory(ip_address, self.platform, self.port, self.secrets_group) - nr.inventory.hosts.update(inventory_constructed) - self.logger.info(nr.inventory.hosts) - - self.logger.info("Inventory built for %s devices", len(ip_address)) - - results = nr.run(task=netmiko_send_commands) + nr_with_processors.inventory.hosts.update(inventory_constructed) - for agg_result in results: - for r in results[agg_result]: - self.logger.info("host: %s", r.host) - self.logger.info("result: %s", r.result) + #### Remove before final merge #### + for host, data in nr_with_processors.inventory.hosts.items(): + self.logger.info("%s;\n%s", host, data.dict()) + #### End #### + nr_with_processors.run(task=netmiko_send_commands) except Exception as err: self.logger.info("Error: %s", err) return err - # return { - # "10.1.1.8": { - # "command_output_results": True, - # 
"hostname": "demo-cisco-xe", - # "serial_number": "9ABUXU580QS", - # "device_type": "CSR1000V2", - # "mgmt_ip_address": "10.1.1.8", - # "mgmt_interface": "GigabitEthernet1", - # "manufacturer": "Cisco", - # "platform": "IOS", - # "network_driver": "cisco_ios", - # "prefix": "10.0.0.0", # this is the network field on the Prefix model - # "prefix_length": 8, - # "mask_length": 24, - # }, - # "10.1.1.9": { - # "command_output_results": False, - # } - # } - return {"additonal_data": "This worked"} + return compiled_results jobs = [OnboardingTask, SSOTDeviceOnboarding, CommandGetterDO] diff --git a/nautobot_device_onboarding/nornir_plays/command_getter.py b/nautobot_device_onboarding/nornir_plays/command_getter.py index 083e861a..91b4eb44 100755 --- a/nautobot_device_onboarding/nornir_plays/command_getter.py +++ b/nautobot_device_onboarding/nornir_plays/command_getter.py @@ -5,4 +5,4 @@ def netmiko_send_commands(task: Task): for command in PLATFORM_COMMAND_MAP.get(task.host.platform, "default"): - task.run(task=netmiko_send_command, command_string=command, use_textfsm=True) + task.run(task=netmiko_send_command, name=command, command_string=command, use_textfsm=True) diff --git a/nautobot_device_onboarding/nornir_plays/logger.py b/nautobot_device_onboarding/nornir_plays/logger.py new file mode 100755 index 00000000..4fb31b04 --- /dev/null +++ b/nautobot_device_onboarding/nornir_plays/logger.py @@ -0,0 +1,46 @@ +"""Custom logger to support writing to console and db.""" +import logging +from typing import Any + +LOGGER = logging.getLogger("NORNIR_LOGGER") + +handler = logging.StreamHandler() +handler.setLevel(logging.NOTSET) +LOGGER.addHandler(handler) +LOGGER_ADAPTER = logging.LoggerAdapter(LOGGER, extra={}) + + +class NornirLogger: + """Logger that handles same signature as standard Python Library logging but also write to db.""" + + def __init__(self, job_result, log_level: int): + """Initialize the object.""" + self.job_result = job_result + LOGGER.setLevel(log_level) 
+ + def _logging_helper(self, attr: str, message: str, extra: Any = None): + """Logger helper to set both db and console logs at once.""" + if not extra: + extra = {} + getattr(LOGGER_ADAPTER, attr)(message, extra=extra) + self.job_result.log(message, level_choice=attr, obj=extra.get("object"), grouping=extra.get("grouping", "")) + + def debug(self, message: str, extra: Any = None): + """Match standard Python Library debug signature.""" + self._logging_helper("debug", message, extra) + + def info(self, message: str, extra: Any = None): + """Match standard Python Library info signature.""" + self._logging_helper("info", message, extra) + + def warning(self, message: str, extra: Any = None): + """Match standard Python Library warning signature.""" + self._logging_helper("warning", message, extra) + + def error(self, message: str, extra: Any = None): + """Match standard Python Library error signature.""" + self._logging_helper("error", message, extra) + + def critical(self, message: str, extra: Any = None): + """Match standard Python Library critical signature.""" + self._logging_helper("critical", message, extra) \ No newline at end of file diff --git a/nautobot_device_onboarding/nornir_plays/processor.py b/nautobot_device_onboarding/nornir_plays/processor.py new file mode 100755 index 00000000..9ba15630 --- /dev/null +++ b/nautobot_device_onboarding/nornir_plays/processor.py @@ -0,0 +1,69 @@ +"""Processor used by Device Onboarding to catch unknown errors.""" + +from nornir.core.inventory import Host +from nornir.core.task import AggregatedResult, MultiResult, Task +from nornir_nautobot.exceptions import NornirNautobotException +from nornir_nautobot.plugins.processors import BaseLoggingProcessor + + +class ProcessorDO(BaseLoggingProcessor): + """Processor class for Device Onboarding jobs.""" + + def __init__(self, logger, command_outputs): + """Set logging facility.""" + self.logger = logger + self.data = command_outputs + + def task_started(self, task: Task) -> 
None: + self.data[task.name] = {} + self.data[task.name]["started"] = True + + def task_completed(self, task: Task, result: AggregatedResult) -> None: + self.data[task.name]["completed"] = True + + def task_instance_started(self, task: Task, host: Host) -> None: + """Processor for Logging on Task Start.""" + self.logger.info(f"Starting {task.name}.", extra={"object": task.host}) + self.data[task.name][host.name] = {"started": True} + + def task_instance_completed(self, task: Task, host: Host, result: MultiResult) -> None: + """Nornir processor task completion for OS upgrades. + + Args: + task (Task): Nornir task individual object + host (Host): Host object with Nornir + result (MultiResult): Result from Nornir task + + Returns: + None + """ + # Complex logic to see if the task exception is expected, which is depicted by + # a sub task raising a NornirNautobotException. + if result.failed: + for level_1_result in result: + if hasattr(level_1_result, "exception") and hasattr(level_1_result.exception, "result"): + for level_2_result in level_1_result.exception.result: + if isinstance(level_2_result.exception, NornirNautobotException): + return + self.logger.critical(f"{task.name} failed: {result.exception}", extra={"object": task.host}) + else: + self.logger.info( + f"Task Name: {task.name} Task Result: {result.result}", extra={"object": task.host} + ) + self.data[task.name][host.name] = { + "completed": True, + "result": result.result, + } + + def subtask_instance_completed(self, task: Task, host: Host, result: MultiResult) -> None: + """Processor for Logging on SubTask Completed.""" + self.logger.info(f"Subtask completed {task.name}.", extra={"object": task.host}) + self.data[task.name][host.name] = { + "failed": result.failed, + "result": result.result, + } + def subtask_instance_started(self, task: Task, host: Host) -> None: + """Processor for Logging on SubTask Start.""" + self.logger.info(f"Subtask starting {task.name}.", extra={"object": task.host}) + 
self.data[task.name] = {} + self.data[task.name][host.name] = {"started": True} \ No newline at end of file From 9f04e84b35b4da331d0e14fdda55ff95ef602a70 Mon Sep 17 00:00:00 2001 From: Susan Hooks Date: Mon, 22 Jan 2024 23:27:07 +0000 Subject: [PATCH 010/225] processor update --- .../nornir_plays/processor.py | 46 +++++++++++++++---- 1 file changed, 38 insertions(+), 8 deletions(-) diff --git a/nautobot_device_onboarding/nornir_plays/processor.py b/nautobot_device_onboarding/nornir_plays/processor.py index 9ba15630..8f27ed6e 100755 --- a/nautobot_device_onboarding/nornir_plays/processor.py +++ b/nautobot_device_onboarding/nornir_plays/processor.py @@ -49,21 +49,51 @@ def task_instance_completed(self, task: Task, host: Host, result: MultiResult) - else: self.logger.info( f"Task Name: {task.name} Task Result: {result.result}", extra={"object": task.host} - ) + ) + self.data[task.name][host.name] = { "completed": True, - "result": result.result, + "failed": result.failed, } def subtask_instance_completed(self, task: Task, host: Host, result: MultiResult) -> None: """Processor for Logging on SubTask Completed.""" self.logger.info(f"Subtask completed {task.name}.", extra={"object": task.host}) - self.data[task.name][host.name] = { - "failed": result.failed, - "result": result.result, - } + + formatted_data = self.format_onboarding_ios(host, result) + host_ip = host.name + + if host.name not in self.data: + self.data[host.name] = formatted_data + else: + for key, value in formatted_data.items(): + self.data[host_ip][key] = value + + def subtask_instance_started(self, task: Task, host: Host) -> None: """Processor for Logging on SubTask Start.""" self.logger.info(f"Subtask starting {task.name}.", extra={"object": task.host}) - self.data[task.name] = {} - self.data[task.name][host.name] = {"started": True} \ No newline at end of file + + def format_onboarding_ios(self, host: Host, result: MultiResult): + primary_ip4 = host.name + formatted_data = {} + + for r in result: + 
if r.name == "show inventory": + device_type = r.result[0].get("pid") + formatted_data["device_type"] = device_type + elif r.name == "show version": + hostname = r.result[0].get("hostname") + serial = r.result[0].get("serial") + formatted_data["hostname"] = hostname + formatted_data["serial"] = serial[0] + elif r.name == "show interfaces": + show_interfaces = r.result + for interface in show_interfaces: + if interface.get("ip_address") == primary_ip4: + mask_length = interface.get("prefix_length") + interface_name = interface.get("interface") + formatted_data["mask_length"] = mask_length + formatted_data["interface_name"] = interface_name + + return formatted_data \ No newline at end of file From 0c8f96440c3aabe9103b4286218d89ef58e059fa Mon Sep 17 00:00:00 2001 From: David Cates Date: Mon, 22 Jan 2024 21:59:56 -0700 Subject: [PATCH 011/225] update ssot models and adapters --- .../diffsync/adapters/onboarding_adapters.py | 208 ++++++++------ .../diffsync/models/onboarding_models.py | 254 +++++++++++++----- nautobot_device_onboarding/jobs.py | 7 + 3 files changed, 315 insertions(+), 154 deletions(-) diff --git a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py index c5401ae8..9a8b5fb7 100644 --- a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py @@ -1,74 +1,132 @@ """DiffSync adapters.""" import netaddr -from nautobot.dcim.choices import InterfaceTypeChoices -from nautobot.dcim.models import Device +from nautobot.dcim.models import Device, DeviceType, Manufacturer, Platform +from nautobot.extras.models.jobs import Job as JobModel from nautobot_device_onboarding.diffsync.models import onboarding_models -from nautobot_ssot.contrib import NautobotAdapter from diffsync import DiffSync -from nautobot.extras.models.jobs import Job as JobModel - ####################################### # FOR 
TESTING ONLY - TO BE REMOVED # ####################################### -mock_data = { - "10.1.1.8": { - "hostname": "demo-cisco-xe", - "serial_number": "9ABUXU580QS", - "device_type": "CSR1000V2", - "mgmt_ip_address": "10.1.1.8", - "mgmt_interface": "GigabitEthernet1", - "manufacturer": "Cisco", - "platform": "IOS", - "network_driver": "cisco_ios", - "prefix": "10.0.0.0", # this is the network field on the Prefix model - "prefix_length": 8, - "mask_length": 24, - } -} +# mock_data = { +# "10.1.1.8": { +# "hostname": "demo-cisco-xe", +# "serial_number": "9ABUXU580QS", +# "device_type": "CSR1000V2", +# "mgmt_interface": "GigabitEthernet3", +# "manufacturer": "Cisco", +# "platform": "IOS", +# "network_driver": "cisco_ios", +# "mask_length": 24, +# } +# } ####################################### ####################################### -class OnboardingNautobotAdapter(NautobotAdapter): +class OnboardingNautobotAdapter(DiffSync): """Adapter for loading Nautobot data.""" manufacturer = onboarding_models.OnboardingManufacturer platform = onboarding_models.OnboardingPlatform device = onboarding_models.OnboardingDevice device_type = onboarding_models.OnboardingDeviceType - interface = onboarding_models.OnboardingInterface - ip_address = onboarding_models.OnboardingIPAddress top_level = ["manufacturer", "platform", "device_type", "device"] - def _load_objects(self, diffsync_model): - """Given a diffsync model class, load a list of models from the database and return them.""" - parameter_names = self._get_parameter_names(diffsync_model) - if diffsync_model._model == Device: - for database_object in diffsync_model._get_queryset(filter=self.job.ip_addresses): - self._load_single_object(database_object, diffsync_model, parameter_names) - else: - for database_object in diffsync_model._get_queryset(): - self._load_single_object(database_object, diffsync_model, parameter_names) + def __init__(self, job, sync, *args, **kwargs): + """Initialize the OnboardingNautobotAdapter.""" + 
super().__init__(*args, **kwargs) + self.job = job + self.sync = sync + + def load_manufacturers(self): + for manufacturer in Manufacturer.objects.all(): + if self.job.debug: + self.job.logger.debug(f"Loading Manufacturer data from Nautobot...") + onboarding_manufacturer = self.manufacturer(diffsync=self, name=manufacturer.name) + self.add(onboarding_manufacturer) + if self.job.debug: + self.job.logger.debug(f"Manufacturer: {manufacturer.name} loaded.") + + def load_platforms(self): + if self.job.debug: + self.job.logger.debug(f"Loading Platform data from Nautobot...") + for platform in Platform.objects.all(): + onboarding_platform = self.platform( + diffsync=self, + name=platform.name, + network_driver=platform.network_driver, + manufacturer__name=platform.manufacturer.name, + ) + self.add(onboarding_platform) + if self.job.debug: + self.job.logger.debug(f"Platform: {platform.name} loaded.") + + def load_device_types(self): + if self.job.debug: + self.job.logger.debug(f"Loading DeviceType data from Nautobot...") + for device_type in DeviceType.objects.all(): + onboarding_device_type = self.device_type( + diffsync=self, + model=device_type.model, + manufacturer__name=device_type.manufacturer.name, + ) + self.add(onboarding_device_type) + if self.job.debug: + self.job.logger.debug(f"DeviceType: {device_type.model} loaded.") + + def load_devices(self): + if self.job.debug: + self.job.logger.debug(f"Loading Device data from Nautobot...") + + for device in Device.objects.filter(primary_ip4__host__in=self.job.ip_addresses): + interface_list = list() + for interface in device.interfaces.all(): + interface_list.append(interface.name) + + onboarding_device = self.device( + diffsync=self, + device_type__model=device.device_type.model, + location__name=device.location.name, + name=device.name, + platform__name=device.platform.name if device.platform else "", + primary_ip4__host=device.primary_ip4.host if device.primary_ip4 else "", + 
primary_ip4__status__name=device.primary_ip4.status.name if device.primary_ip4 else "", + role__name=device.role.name, + status__name=device.status.name, + secrets_group__name=device.secrets_group.name if device.secrets_group else "", + interfaces=interface_list, + mask_length=device.primary_ip4.mask_length if device.primary_ip4 else "", + ) + self.add(onboarding_device) + if self.job.debug: + self.job.logger.debug(f"Platform: {device.name} loaded.") + + def load(self): + """Load nautobot data.""" + self.load_manufacturers() + self.load_platforms() + self.load_device_types() + self.load_devices() class OnboardingNetworkAdapter(DiffSync): """Adapter for loading device data from a network.""" + device_data = None + manufacturer = onboarding_models.OnboardingManufacturer platform = onboarding_models.OnboardingPlatform device = onboarding_models.OnboardingDevice device_type = onboarding_models.OnboardingDeviceType - interface = onboarding_models.OnboardingInterface - ip_address = onboarding_models.OnboardingIPAddress top_level = ["manufacturer", "platform", "device_type", "device"] def __init__(self, job, sync, *args, **kwargs): - """Initialize the NautobotDiffSync.""" + """Initialize the OnboardingNetworkAdapter.""" super().__init__(*args, **kwargs) self.job = job self.sync = sync @@ -88,13 +146,7 @@ def _validate_ip_addresses(self, ip_addresses): else: raise netaddr.AddrConversionError - def load_devices(self): - """Load device data into a DiffSync model.""" - - # PROVIDE TO JOB: ip4address, port, timeout, secrets_group, platform (optional) - # TODO: CHECK FOR FAILED CONNECTIONS AND DO NOT LOAD DATA, LOG FAILED IPs - # TODO: Call onboarding job to query devices - + def execute_command_getter(self): command_getter_job = JobModel.objects.get(name="Command Getter for Device Onboarding").job_task result = command_getter_job.s() result.apply_async( @@ -102,94 +154,74 @@ def load_devices(self): kwargs=self.job.job_result.task_kwargs, **self.job.job_result.celery_kwargs, ) 
+ self.device_data = result + + def load_devices(self): + """Load device data into a DiffSync model.""" + + # PROVIDE TO JOB: ip4address, port, timeout, secrets_group, platform (optional) + # TODO: CHECK FOR FAILED CONNECTIONS AND DO NOT LOAD DATA, LOG FAILED IPs - for ip_address in mock_data: + for ip_address in self.device_data: if self.job.debug: self.job.logger.debug(f"loading device data for {ip_address}") onboarding_device = self.device( diffsync=self, - device_type__model=mock_data[ip_address]["device_type"], + device_type__model=self.device_data[ip_address]["device_type"], location__name=self.job.location.name, - name=mock_data[ip_address]["hostname"], - platform__name=mock_data[ip_address]["platform"], + name=self.device_data[ip_address]["hostname"], + platform__name=self.device_data[ip_address]["platform"], primary_ip4__host=ip_address, + primary_ip4__status__name=self.job.ip_address_status.name, role__name=self.job.device_role.name, status__name=self.job.device_status.name, secrets_group__name=self.job.secrets_group.name, + interfaces=[self.device_data[ip_address]["mgmt_interface"]], + mask_length=self.device_data[ip_address]["mask_length"], ) self.add(onboarding_device) - self.load_interface(onboarding_device, mock_data, ip_address) def load_device_types(self): """Load device type data into a DiffSync model.""" - for ip_address in mock_data: + for ip_address in self.device_data: if self.job.debug: self.job.logger.debug(f"loading device_type data for {ip_address}") onboarding_device_type = self.device_type( diffsync=self, - model=mock_data[ip_address]["device_type"], - manufacturer__name=mock_data[ip_address]["manufacturer"], + model=self.device_data[ip_address]["device_type"], + manufacturer__name=self.device_data[ip_address]["manufacturer"], ) self.add(onboarding_device_type) - def load_interface(self, onboarding_device, device_data, ip_address): - """Load interface data into a DiffSync model.""" - if self.job.debug: - self.job.logger.debug(f"loading 
interface data for {ip_address}") - onboarding_interface = self.interface( - diffsync=self, - name=device_data[ip_address]["mgmt_interface"], - device__name=device_data[ip_address]["hostname"], - status__name=self.job.interface_status.name, - type=InterfaceTypeChoices.TYPE_OTHER, - mgmt_only=self.job.management_only_interface, - ) - self.add(onboarding_interface) - onboarding_device.add_child(onboarding_interface) - self.load_ip_address(onboarding_interface, mock_data, ip_address) - - def load_ip_address(self, onboarding_interface, device_data, ip_address): - """Load ip address data into a DiffSync model.""" - if self.job.debug: - self.job.logger.debug(f"loading ip address data for {ip_address}") - onboarding_ip_address = self.ip_address( - diffsync=self, - parent__namespace__name=self.job.namespace.name, - parent__network=device_data[ip_address]["prefix"], - parent__prefix_length=device_data[ip_address]["prefix_length"], - host=ip_address, - mask_length=device_data[ip_address]["mask_length"], - ) - self.add(onboarding_ip_address) - onboarding_interface.add_child(onboarding_ip_address) - def load_manufacturers(self): """Load manufacturer data into a DiffSync model.""" - for ip_address in mock_data: + for ip_address in self.device_data: if self.job.debug: self.job.logger.debug(f"loading manufacturer data for {ip_address}") onboarding_manufacturer = self.manufacturer( diffsync=self, - name=mock_data[ip_address]["manufacturer"], + name=self.device_data[ip_address]["manufacturer"], ) self.add(onboarding_manufacturer) def load_platforms(self): """Load platform data into a DiffSync model.""" - for ip_address in mock_data: + for ip_address in self.device_data: if self.job.debug: self.job.logger.debug(f"loading platform data for {ip_address}") onboarding_platform = self.platform( diffsync=self, - name=mock_data[ip_address]["platform"], - manufacturer__name=mock_data[ip_address]["manufacturer"], - network_driver=mock_data[ip_address]["network_driver"], + 
name=self.device_data[ip_address]["platform"], + manufacturer__name=self.device_data[ip_address]["manufacturer"], + network_driver=self.device_data[ip_address]["network_driver"], ) self.add(onboarding_platform) + def load(self): - """Load device data.""" + """Load network data.""" self._validate_ip_addresses(self.job.ip_addresses) + self.execute_command_getter() self.load_manufacturers() self.load_platforms() self.load_device_types() diff --git a/nautobot_device_onboarding/diffsync/models/onboarding_models.py b/nautobot_device_onboarding/diffsync/models/onboarding_models.py index 947336a9..ea14e7ae 100644 --- a/nautobot_device_onboarding/diffsync/models/onboarding_models.py +++ b/nautobot_device_onboarding/diffsync/models/onboarding_models.py @@ -1,61 +1,222 @@ """Diffsync models.""" +import ipaddress from typing import List, Optional import netaddr +from django.core.exceptions import ObjectDoesNotExist, ValidationError +from nautobot.apps.choices import InterfaceTypeChoices, PrefixTypeChoices from nautobot.dcim.models import Device, DeviceType, Interface, Manufacturer, Platform +from nautobot.extras.models import Role, SecretsGroup, Status from nautobot.ipam.models import IPAddress, Prefix -from nautobot_ssot.contrib import NautobotModel +from nautobot_ssot.contrib import NautobotModel +from diffsync import DiffSyncModel -class OnboardingDevice(NautobotModel): + +class OnboardingDevice(DiffSyncModel): _modelname = "device" - _model = Device - _identifiers = ("primary_ip4__host",) + # _model = Device + _identifiers = ("location__name", "name") _attributes = ( "device_type__model", - "location__name", - "name", + "primary_ip4__host", + "primary_ip4__status__name", + "prefix_length", + "mask_length", "platform__name", "role__name", "secrets_group__name", "status__name", + "interfaces", ) - _children = { - "interface": "interfaces", - } - primary_ip4__host: str + name: str + location__name: str + primary_ip4__host: Optional[str] + primary_ip4__status__name: 
Optional[str] + prefix_length: Optional[int] + mask_length: Optional[int] device_type__model: Optional[str] - location__name: Optional[str] - name: Optional[str] platform__name: Optional[str] role__name: Optional[str] secrets_group__name: Optional[str] status__name: Optional[str] - interfaces: List["OnboardingInterface"] = [] + interfaces: Optional[list] device_type: List["OnboardingDeviceType"] = [] @classmethod - def _get_queryset(cls, filter: list = None): - """Get the queryset used to load the models data from Nautobot.""" - parameter_names = list(cls._identifiers) + list(cls._attributes) - # Here we identify any foreign keys (i.e. fields with '__' in them) so that we can load them directly in the - # first query if this function hasn't been overridden. - prefetch_related_parameters = [parameter.split("__")[0] for parameter in parameter_names if "__" in parameter] - qs = cls.get_queryset(filter=filter) - return qs.prefetch_related(*prefetch_related_parameters) + def _get_or_create_ip_address(cls, diffsync, attrs): + """Attempt to get a Nautobot IP Address, create a new one if necessary.""" + ip_address = None + try: + ip_address = IPAddress.objects.get( + address=f"{attrs['primary_ip4__host']}/{attrs['mask_length']}", + parent__namespace=diffsync.job.namespace, + ) + except ObjectDoesNotExist: + try: + ip_address = IPAddress.objects.create( + address=f"{attrs['primary_ip4__host']}/{attrs['mask_length']}", + namespace=diffsync.job.namespace, + status=diffsync.job.ip_address_status, + ) + except ValidationError as err: + diffsync.job.logger.warning( + f"No suitable parent Prefix exists for IP {attrs['primary_ip4__host']} in " + f"Namespace {diffsync.job.namespace.name}, a new Prefix will be created." 
+ ) + new_prefix = ipaddress.ip_interface(f"{attrs['primary_ip4__host']}/{attrs['mask_length']}") + try: + prefix = Prefix.objects.get( + prefix=f"{new_prefix.network}", + namespace=diffsync.job.namespace, + ) + except ObjectDoesNotExist: + prefix = Prefix.objects.create( + prefix=f"{new_prefix.network}", + namespace=diffsync.job.namespace, + type=PrefixTypeChoices.TYPE_NETWORK, + status=diffsync.job.ip_address_status, + ) + ip_address, _ = IPAddress.objects.get_or_create( + address=f"{attrs['primary_ip4__host']}/{attrs['mask_length']}", + status=diffsync.job.ip_address_status, + parent=prefix, + ) + return ip_address + + @classmethod + def _get_or_create_interface(cls, diffsync, device, attrs): + """Attempt to get a Device Interface, create a new one if necessary.""" + device_interface = None + try: + device_interface = Interface.objects.get( + name=attrs["interfaces"][0], + device=device, + ) + except ObjectDoesNotExist: + try: + device_interface = Interface.objects.create( + name=attrs["interfaces"][0], + mgmt_only=diffsync.job.management_only_interface, + status=diffsync.job.interface_status, + type=InterfaceTypeChoices.TYPE_OTHER, + device=device, + ) + except ValidationError as err: + diffsync.job.logger.error(f"Device Interface could not be created, {err}") + return device_interface + @classmethod - def get_queryset(cls, filter: list = None): - """Get the queryset used to load the models data from Nautobot.""" - if filter: - # Only devices with a primary_ip that is being onboarded should be considered for the sync - return cls._model.objects.filter(primary_ip4__host__in=filter) + def create(cls, diffsync, ids, attrs): + """Create a new nautobot device using data scraped from a device.""" + # Determine device platform + platform = None + if diffsync.job.platform: + platform = diffsync.job.platform else: - return cls._model.objects.all() + platform = Platform.objects.get(name=attrs["platform__name"]) + + try: + device = Device.objects.get(**ids) + + except 
ObjectDoesNotExist: + # Create Device + device = Device.objects.create( + location=diffsync.job.location, + status=diffsync.job.device_status, + role=diffsync.job.device_role, + device_type=DeviceType.objects.get(model=attrs["device_type__model"]), + name=ids["name"], + platform=platform, + secrets_group=diffsync.job.secrets_group, + ) + + ip_address = cls._get_or_create_ip_address(diffsync=diffsync, attrs=attrs) + interface = cls._get_or_create_interface(diffsync=diffsync, device=device, attrs=attrs) + interface.ip_addresses.add(ip_address) + interface.validated_save() + + # Assign primary IP Address to device + try: + device.primary_ip4 = ip_address + device.validated_save() + except ValidationError as err: + diffsync.job.logger.error( + f"Failed to save changes to {attrs['primary_ip4__host']} Device: {ids['name']}, {err}" + ) + + return DiffSyncModel.create(diffsync=diffsync, ids=ids, attrs=attrs) + + def update(self, attrs): + """Update an existing nautobot device using data scraped from a device.""" + + device = Device.objects.get(name=self.name, location__name=self.location__name) + if self.diffsync.job.debug: + self.diffsync.job.logger.debug(f"Updating device with attrs: {attrs}") + if attrs.get("device_type__model"): + device.device_type = DeviceType.objects.get(model=attrs.get("device_type__model")) + if attrs.get("platform__name"): + device.platform = Platform.objects.get(name=attrs.get("platform__name")) + if attrs.get("role__name"): + device.role = Role.objects.get(name=attrs.get("role__name")) + if attrs.get("status__name"): + device.status = Status.objects.get(name=attrs.get("status__name")) + if attrs.get("secrets_group__name"): + device.secrets_group = SecretsGroup.objects.get(name=attrs.get("secrets_group__name")) + if attrs.get("primary_ip4__status__name"): + device.primary_ip.status.name = Status.objects.get(name=attrs.get("primary_ip4__status__name")) + + if attrs.get("interfaces"): + interface = 
self._get_or_create_interface(diffsync=self.diffsync, device=device, attrs=attrs) + # If the primary ip address is being updated, the mask length must be included + if attrs.get("primary_ip4__host"): + if not attrs.get("mask_length"): + attrs["mask_length"] = device.primary_ip4.mask_length + ip_address = self._get_or_create_ip_address(diffsync=self.diffsync, attrs=attrs) + interface.ip_addresses.add(ip_address) + interface.validated_save() + # set the new ip address as the device primary ip address + device.primary_ip4 = ip_address + interface.validated_save() + else: + # Check for a device with a matching IP Address and remove it before assigning + # the IP Address to the new interface + try: + old_interface = Interface.objects.get( + device=device, + ip_addresses__in=[device.primary_ip4] + ) + old_interface.ip_addresses.remove(device.primary_ip4) + old_interface.validated_save() + interface.ip_addresses.add(device.primary_ip4) + interface.validated_save() + except ObjectDoesNotExist: + interface.ip_addresses.add(device.primary_ip4) + interface.validated_save() + else: + # update the primary ip address when the interface has not changed + if attrs.get("primary_ip4__host"): + if not attrs.get("mask_length"): + attrs["mask_length"] = device.primary_ip4.mask_length + ip_address = self._get_or_create_ip_address(diffsync=self.diffsync, attrs=attrs) + interface = Interface.objects.get( + device=device, + ip_addresses__in=[device.primary_ip4] + ) + interface.ip_addresses.remove(device.primary_ip4) #TODO: This is not removing the IP from the interface as expected + interface.ip_addresses.add(ip_address) + interface.validated_save() + device.primary_ip4 = ip_address + try: + device.validated_save() + except ValidationError as err: + self.diffsync.job.logger.error(f"Device {self.name} failed to update, {err}") + return super().update(attrs) class OnboardingDeviceType(NautobotModel): @@ -67,45 +228,6 @@ class OnboardingDeviceType(NautobotModel): manufacturer__name: str 
-class OnboardingInterface(NautobotModel): - _modelname = "interface" - _model = Interface - _identifiers = ("name", "device__name") - _attributes = ( - "mgmt_only", - "status__name", - "type", - ) - _children = {"ip_address": "ip_addresses"} - - name: str - device__name: str - - mgmt_only: Optional[bool] - status__name: Optional[str] - type: Optional[str] - - ip_addresses: List["OnboardingIPAddress"] = [] - - -class OnboardingIPAddress(NautobotModel): - _modelname = "ip_address" - _model = IPAddress - _identifiers = ( - "parent__namespace__name", - "parent__network", - "parent__prefix_length", - "host", - "mask_length", - ) - - parent__namespace__name: str - parent__network: str - parent__prefix_length: int - host: str - mask_length: int - - class OnboardingManufacturer(NautobotModel): _modelname = "manufacturer" _model = Manufacturer diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py index bda4d409..579f003b 100644 --- a/nautobot_device_onboarding/jobs.py +++ b/nautobot_device_onboarding/jobs.py @@ -262,6 +262,12 @@ class Meta: required=True, description="Status to be applied to all onboarded device interfaces", ) + ip_address_status = ObjectVar( + model=Status, + query_params={"content_types": "ipam.ipaddress"}, + required=True, + description="Status to be applied to all onboarded ip addresses.", + ) port = IntegerVar(default=22) timeout = IntegerVar(default=30) secrets_group = ObjectVar( @@ -296,6 +302,7 @@ def run(self, dryrun, memory_profiling, *args, **kwargs): # pylint:disable=argu self.device_role = kwargs["device_role"] self.device_status = kwargs["device_status"] self.interface_status = kwargs["interface_status"] + self.ip_address_status = kwargs["ip_address_status"] self.port = kwargs["port"] self.timeout = kwargs["timeout"] self.secrets_group = kwargs["secrets_group"] From f9864c91a89d74c34fe90b361ae291f951857d4a Mon Sep 17 00:00:00 2001 From: Susan Hooks Date: Tue, 23 Jan 2024 20:40:42 +0000 Subject: [PATCH 012/225] 
updated formatter --- nautobot_device_onboarding/jobs.py | 10 ++-- .../nornir_plays/formatter.py | 55 ++++++++++++++++++ .../nornir_plays/processor.py | 57 +++++++------------ 3 files changed, 81 insertions(+), 41 deletions(-) create mode 100644 nautobot_device_onboarding/nornir_plays/formatter.py diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py index bda4d409..1f03b195 100644 --- a/nautobot_device_onboarding/jobs.py +++ b/nautobot_device_onboarding/jobs.py @@ -343,11 +343,10 @@ def __init__(self, *args, **kwargs): def run(self): mock_job_data = { - "ip4address": "10.1.1.8", - "platform": "cisco_ios", - # "secrets_group": SecretsGroup.objects.get(name="Cisco Devices"), - "secrets_group": None, - "port": 22, + "ip4address": "174.51.52.76", + "platform": "cisco_nxos", + "secrets_group": SecretsGroup.objects.get(name="NW_CREDS"), + "port": 8022, "timeout": 30, } @@ -379,6 +378,7 @@ def run(self): #### End #### nr_with_processors.run(task=netmiko_send_commands) + except Exception as err: self.logger.info("Error: %s", err) return err diff --git a/nautobot_device_onboarding/nornir_plays/formatter.py b/nautobot_device_onboarding/nornir_plays/formatter.py new file mode 100644 index 00000000..4a41ec20 --- /dev/null +++ b/nautobot_device_onboarding/nornir_plays/formatter.py @@ -0,0 +1,55 @@ +def format_ob_data_ios(host, result): + """Format the data for onboarding IOS devices.""" + primary_ip4 = host.name + formatted_data = {} + + for r in result: + if r.name == "show inventory": + device_type = r.result[0].get("pid") + formatted_data["device_type"] = device_type + elif r.name == "show version": + hostname = r.result[0].get("hostname") + serial = r.result[0].get("serial") + formatted_data["hostname"] = hostname + formatted_data["serial"] = serial[0] + elif r.name == "show interfaces": + show_interfaces = r.result + for interface in show_interfaces: + if interface.get("ip_address") == primary_ip4: + mask_length = 
interface.get("prefix_length") + interface_name = interface.get("interface") + formatted_data["mask_length"] = mask_length + formatted_data["interface_name"] = interface_name + + return formatted_data + +# TODO: Add NXOS formatter, others if necessary +def format_ob_data_nxos(host, result): + """Format the data for onboarding NXOS devices.""" + primary_ip4 = host.name + formatted_data = {} + + for r in result: + if r.name == "show inventory": + # TODO: Add check for PID when textfsm template is fixed + pass + elif r.name == "show version": + device_type = r.result[0].get("platform") + formatted_data["device_type"] = device_type + hostname = r.result[0].get("hostname") + serial = r.result[0].get("serial") + formatted_data["hostname"] = hostname + if serial: + formatted_data["serial"] = serial[0] + else: + formatted_data["serial"] = "" + elif r.name == "show interfaces": + show_interfaces = r.result + for interface in show_interfaces: + if interface.get("ip_address") == primary_ip4: + mask_length = interface.get("prefix_length") + interface_name = interface.get("interface") + formatted_data["mask_length"] = mask_length + formatted_data["interface_name"] = interface_name + + return formatted_data diff --git a/nautobot_device_onboarding/nornir_plays/processor.py b/nautobot_device_onboarding/nornir_plays/processor.py index 8f27ed6e..1a8e9882 100755 --- a/nautobot_device_onboarding/nornir_plays/processor.py +++ b/nautobot_device_onboarding/nornir_plays/processor.py @@ -1,10 +1,11 @@ """Processor used by Device Onboarding to catch unknown errors.""" +from typing import Dict from nornir.core.inventory import Host from nornir.core.task import AggregatedResult, MultiResult, Task from nornir_nautobot.exceptions import NornirNautobotException from nornir_nautobot.plugins.processors import BaseLoggingProcessor - +from nautobot_device_onboarding.nornir_plays.formatter import format_ob_data_ios, format_ob_data_nxos class ProcessorDO(BaseLoggingProcessor): """Processor class for 
Device Onboarding jobs.""" @@ -12,7 +13,7 @@ class ProcessorDO(BaseLoggingProcessor): def __init__(self, logger, command_outputs): """Set logging facility.""" self.logger = logger - self.data = command_outputs + self.data: Dict = command_outputs def task_started(self, task: Task) -> None: self.data[task.name] = {} @@ -50,8 +51,8 @@ def task_instance_completed(self, task: Task, host: Host, result: MultiResult) - self.logger.info( f"Task Name: {task.name} Task Result: {result.result}", extra={"object": task.host} ) - - self.data[task.name][host.name] = { + + self.data[host.name] = { "completed": True, "failed": result.failed, } @@ -59,41 +60,25 @@ def task_instance_completed(self, task: Task, host: Host, result: MultiResult) - def subtask_instance_completed(self, task: Task, host: Host, result: MultiResult) -> None: """Processor for Logging on SubTask Completed.""" self.logger.info(f"Subtask completed {task.name}.", extra={"object": task.host}) - - formatted_data = self.format_onboarding_ios(host, result) - host_ip = host.name - + self.logger.info(f"Subtask result {result.result}.", extra={"object": task.host}) if host.name not in self.data: - self.data[host.name] = formatted_data + self.data[host.name] = { + "platform": host.platform, + "manufacturer": host.platform.split("_")[0].title() if host.platform else "PLACEHOLDER", + } + + if host.platform == "cisco_ios": + formatted_data = format_ob_data_ios(host, result) + elif host.platform == "cisco_nxos": + formatted_data = format_ob_data_nxos(host, result) else: - for key, value in formatted_data.items(): - self.data[host_ip][key] = value - + formatted_data = {} + + self.data[host.name].update(formatted_data) + + def subtask_instance_started(self, task: Task, host: Host) -> None: """Processor for Logging on SubTask Start.""" self.logger.info(f"Subtask starting {task.name}.", extra={"object": task.host}) - - def format_onboarding_ios(self, host: Host, result: MultiResult): - primary_ip4 = host.name - formatted_data = 
{} - - for r in result: - if r.name == "show inventory": - device_type = r.result[0].get("pid") - formatted_data["device_type"] = device_type - elif r.name == "show version": - hostname = r.result[0].get("hostname") - serial = r.result[0].get("serial") - formatted_data["hostname"] = hostname - formatted_data["serial"] = serial[0] - elif r.name == "show interfaces": - show_interfaces = r.result - for interface in show_interfaces: - if interface.get("ip_address") == primary_ip4: - mask_length = interface.get("prefix_length") - interface_name = interface.get("interface") - formatted_data["mask_length"] = mask_length - formatted_data["interface_name"] = interface_name - - return formatted_data \ No newline at end of file + \ No newline at end of file From aaa0c1f07a384b7c027a75d0ee6fb1bff6921532 Mon Sep 17 00:00:00 2001 From: David Cates Date: Tue, 23 Jan 2024 14:00:25 -0700 Subject: [PATCH 013/225] update ssot integration --- .../diffsync/adapters/onboarding_adapters.py | 41 +++++---- .../diffsync/models/onboarding_models.py | 86 +++++++++++++------ nautobot_device_onboarding/jobs.py | 6 ++ 3 files changed, 91 insertions(+), 42 deletions(-) diff --git a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py index 9a8b5fb7..d91b2090 100644 --- a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py @@ -1,8 +1,10 @@ """DiffSync adapters.""" +import time import netaddr from nautobot.dcim.models import Device, DeviceType, Manufacturer, Platform from nautobot.extras.models.jobs import Job as JobModel +from nautobot.extras.models.jobs import JobResult from nautobot_device_onboarding.diffsync.models import onboarding_models from diffsync import DiffSync @@ -10,18 +12,18 @@ ####################################### # FOR TESTING ONLY - TO BE REMOVED # ####################################### -# mock_data = { -# 
"10.1.1.8": { -# "hostname": "demo-cisco-xe", -# "serial_number": "9ABUXU580QS", -# "device_type": "CSR1000V2", -# "mgmt_interface": "GigabitEthernet3", -# "manufacturer": "Cisco", -# "platform": "IOS", -# "network_driver": "cisco_ios", -# "mask_length": 24, -# } -# } +mock_data = { + "10.1.1.11": { + "hostname": "demo-cisco-xe", + "serial": "9ABUXU580QS", + "device_type": "CSR1000V2", + "mgmt_interface": "GigabitEthernet1", + "manufacturer": "Cisco", + "platform": "IOS", + "network_driver": "cisco_ios", + "mask_length": 24, + } +} ####################################### ####################################### @@ -72,6 +74,7 @@ def load_device_types(self): onboarding_device_type = self.device_type( diffsync=self, model=device_type.model, + part_number=device_type.model, manufacturer__name=device_type.manufacturer.name, ) self.add(onboarding_device_type) @@ -100,6 +103,7 @@ def load_devices(self): secrets_group__name=device.secrets_group.name if device.secrets_group else "", interfaces=interface_list, mask_length=device.primary_ip4.mask_length if device.primary_ip4 else "", + serial=device.serial, ) self.add(onboarding_device) if self.job.debug: @@ -116,7 +120,7 @@ def load(self): class OnboardingNetworkAdapter(DiffSync): """Adapter for loading device data from a network.""" - device_data = None + device_data = mock_data manufacturer = onboarding_models.OnboardingManufacturer platform = onboarding_models.OnboardingPlatform @@ -149,12 +153,15 @@ def _validate_ip_addresses(self, ip_addresses): def execute_command_getter(self): command_getter_job = JobModel.objects.get(name="Command Getter for Device Onboarding").job_task result = command_getter_job.s() - result.apply_async( + task_result_id = result.apply_async( args=self.job.job_result.task_args, kwargs=self.job.job_result.task_kwargs, **self.job.job_result.celery_kwargs, ) - self.device_data = result + time.sleep(15) + job_result = JobResult.objects.get(id=str(task_result_id)) + 
self.job.logger.warning(job_result.result) + self.job.logger.warning(task_result_id) def load_devices(self): """Load device data into a DiffSync model.""" @@ -178,6 +185,7 @@ def load_devices(self): secrets_group__name=self.job.secrets_group.name, interfaces=[self.device_data[ip_address]["mgmt_interface"]], mask_length=self.device_data[ip_address]["mask_length"], + serial=self.device_data[ip_address]["serial"], ) self.add(onboarding_device) @@ -189,6 +197,7 @@ def load_device_types(self): onboarding_device_type = self.device_type( diffsync=self, model=self.device_data[ip_address]["device_type"], + part_number=self.device_data[ip_address]["device_type"], manufacturer__name=self.device_data[ip_address]["manufacturer"], ) self.add(onboarding_device_type) @@ -221,7 +230,7 @@ def load_platforms(self): def load(self): """Load network data.""" self._validate_ip_addresses(self.job.ip_addresses) - self.execute_command_getter() + # self.execute_command_getter() self.load_manufacturers() self.load_platforms() self.load_device_types() diff --git a/nautobot_device_onboarding/diffsync/models/onboarding_models.py b/nautobot_device_onboarding/diffsync/models/onboarding_models.py index ea14e7ae..fee53941 100644 --- a/nautobot_device_onboarding/diffsync/models/onboarding_models.py +++ b/nautobot_device_onboarding/diffsync/models/onboarding_models.py @@ -17,13 +17,13 @@ class OnboardingDevice(DiffSyncModel): _modelname = "device" # _model = Device - _identifiers = ("location__name", "name") + _identifiers = ("location__name", "name", "serial",) _attributes = ( "device_type__model", + "mask_length", "primary_ip4__host", "primary_ip4__status__name", "prefix_length", - "mask_length", "platform__name", "role__name", "secrets_group__name", @@ -33,12 +33,13 @@ class OnboardingDevice(DiffSyncModel): name: str location__name: str + serial: Optional[str] + device_type__model: Optional[str] + mask_length: Optional[int] primary_ip4__host: Optional[str] primary_ip4__status__name: Optional[str] 
prefix_length: Optional[int] - mask_length: Optional[int] - device_type__model: Optional[str] platform__name: Optional[str] role__name: Optional[str] secrets_group__name: Optional[str] @@ -47,6 +48,44 @@ class OnboardingDevice(DiffSyncModel): interfaces: Optional[list] device_type: List["OnboardingDeviceType"] = [] + @classmethod + def _get_or_create_device(cls, platform, diffsync, ids, attrs): + """Attempt to get a Device, create a new one if necessary.""" + device = None + try: + # Only Devices with a primary ip address are loaded from Nautobot when syncing. + # If a device is found in Nautobot with a matching name and location as the + # device being created, but does not have a primary ip address, it will need + # to be updated or skipped based on user preference. + + device = Device.objects.get(name=ids["name"], location=diffsync.job.location) + if diffsync.job.update_devices_without_primary_ip: + diffsync.job.logger.warning( + f"Device {ids['name']} already exists in Nautobot but does not have a primary " + "IP Address, this device will be udpated.") + # attrs.update(ids) + diffsync.job.logger.warning(attrs) + #TODO: update matched device! 
+ else: + diffsync.job.logger.warning( + f"Device {ids['name']} already exists in Nautobot but does not have a primary " + "IP Address, this device will be skipped.") + return device + + except ObjectDoesNotExist: + # Create Device + device = Device.objects.create( + location=diffsync.job.location, + status=diffsync.job.device_status, + role=diffsync.job.device_role, + device_type=DeviceType.objects.get(model=attrs["device_type__model"]), + name=ids["name"], + platform=platform, + secrets_group=diffsync.job.secrets_group, + serial=attrs["serial"] + ) + return device + @classmethod def _get_or_create_ip_address(cls, diffsync, attrs): """Attempt to get a Nautobot IP Address, create a new one if necessary.""" @@ -87,7 +126,7 @@ def _get_or_create_ip_address(cls, diffsync, attrs): parent=prefix, ) return ip_address - + @classmethod def _get_or_create_interface(cls, diffsync, device, attrs): """Attempt to get a Device Interface, create a new one if necessary.""" @@ -110,7 +149,6 @@ def _get_or_create_interface(cls, diffsync, device, attrs): diffsync.job.logger.error(f"Device Interface could not be created, {err}") return device_interface - @classmethod def create(cls, diffsync, ids, attrs): """Create a new nautobot device using data scraped from a device.""" @@ -120,28 +158,15 @@ def create(cls, diffsync, ids, attrs): platform = diffsync.job.platform else: platform = Platform.objects.get(name=attrs["platform__name"]) - - try: - device = Device.objects.get(**ids) - - except ObjectDoesNotExist: - # Create Device - device = Device.objects.create( - location=diffsync.job.location, - status=diffsync.job.device_status, - role=diffsync.job.device_role, - device_type=DeviceType.objects.get(model=attrs["device_type__model"]), - name=ids["name"], - platform=platform, - secrets_group=diffsync.job.secrets_group, - ) - + + # Get or create Device, Interface and IP Address + device = cls._get_or_create_device(platform, diffsync, ids, attrs) ip_address = 
cls._get_or_create_ip_address(diffsync=diffsync, attrs=attrs) interface = cls._get_or_create_interface(diffsync=diffsync, device=device, attrs=attrs) interface.ip_addresses.add(ip_address) interface.validated_save() - # Assign primary IP Address to device + # Assign primary IP Address to Device try: device.primary_ip4 = ip_address device.validated_save() @@ -154,8 +179,9 @@ def create(cls, diffsync, ids, attrs): def update(self, attrs): """Update an existing nautobot device using data scraped from a device.""" - device = Device.objects.get(name=self.name, location__name=self.location__name) + self._update_device_attributes(device=device, attrs=attrs) + if self.diffsync.job.debug: self.diffsync.job.logger.debug(f"Updating device with attrs: {attrs}") if attrs.get("device_type__model"): @@ -170,11 +196,14 @@ def update(self, attrs): device.secrets_group = SecretsGroup.objects.get(name=attrs.get("secrets_group__name")) if attrs.get("primary_ip4__status__name"): device.primary_ip.status.name = Status.objects.get(name=attrs.get("primary_ip4__status__name")) + if attrs.get("serial"): + device.primary_ip.serial = attrs.get("serial") if attrs.get("interfaces"): interface = self._get_or_create_interface(diffsync=self.diffsync, device=device, attrs=attrs) - # If the primary ip address is being updated, the mask length must be included + # Update both the interface and primary ip address if attrs.get("primary_ip4__host"): + # If the primary ip address is being updated, the mask length must be included if not attrs.get("mask_length"): attrs["mask_length"] = device.primary_ip4.mask_length ip_address = self._get_or_create_ip_address(diffsync=self.diffsync, attrs=attrs) @@ -183,6 +212,7 @@ def update(self, attrs): # set the new ip address as the device primary ip address device.primary_ip4 = ip_address interface.validated_save() + # Update the interface only else: # Check for a device with a matching IP Address and remove it before assigning # the IP Address to the new interface 
@@ -198,8 +228,8 @@ def update(self, attrs): except ObjectDoesNotExist: interface.ip_addresses.add(device.primary_ip4) interface.validated_save() + # Update the primary ip address only else: - # update the primary ip address when the interface has not changed if attrs.get("primary_ip4__host"): if not attrs.get("mask_length"): attrs["mask_length"] = device.primary_ip4.mask_length @@ -209,6 +239,7 @@ def update(self, attrs): ip_addresses__in=[device.primary_ip4] ) interface.ip_addresses.remove(device.primary_ip4) #TODO: This is not removing the IP from the interface as expected + interface.validated_save() interface.ip_addresses.add(ip_address) interface.validated_save() device.primary_ip4 = ip_address @@ -223,10 +254,13 @@ class OnboardingDeviceType(NautobotModel): _modelname = "device_type" _model = DeviceType _identifiers = ("model", "manufacturer__name") + _attributes = ("part_number",) model: str manufacturer__name: str + part_number: str + class OnboardingManufacturer(NautobotModel): _modelname = "manufacturer" diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py index 775e4eca..f9bee9a5 100644 --- a/nautobot_device_onboarding/jobs.py +++ b/nautobot_device_onboarding/jobs.py @@ -244,6 +244,11 @@ class Meta: label="Set Management Only", description="If True, interfaces that are created or updated will be set to management only. If False, the interface will be set to not be management only.", ) + update_devices_without_primary_ip = BooleanVar( + default=False, + description="If a device at the specified location already exists in Nautobot but is " + "missing a primary ip address, update it with the sync." 
+ ) device_role = ObjectVar( model=Role, query_params={"content_types": "dcim.device"}, @@ -299,6 +304,7 @@ def run(self, dryrun, memory_profiling, *args, **kwargs): # pylint:disable=argu self.namespace = kwargs["namespace"] self.ip_addresses = kwargs["ip_addresses"].replace(" ", "").split(",") self.management_only_interface = kwargs["management_only_interface"] + self.update_devices_without_primary_ip = kwargs["update_devices_without_primary_ip"] self.device_role = kwargs["device_role"] self.device_status = kwargs["device_status"] self.interface_status = kwargs["interface_status"] From 7579c6f8a268b891a89112265c8d89a99414821d Mon Sep 17 00:00:00 2001 From: David Cates Date: Tue, 23 Jan 2024 16:32:18 -0700 Subject: [PATCH 014/225] update ssot integration --- .../diffsync/adapters/onboarding_adapters.py | 6 +- nautobot_device_onboarding/jobs.py | 132 ++++++++++++++---- 2 files changed, 108 insertions(+), 30 deletions(-) diff --git a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py index d91b2090..d0193ba3 100644 --- a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py @@ -120,7 +120,7 @@ def load(self): class OnboardingNetworkAdapter(DiffSync): """Adapter for loading device data from a network.""" - device_data = mock_data + device_data = None manufacturer = onboarding_models.OnboardingManufacturer platform = onboarding_models.OnboardingPlatform @@ -162,6 +162,8 @@ def execute_command_getter(self): job_result = JobResult.objects.get(id=str(task_result_id)) self.job.logger.warning(job_result.result) self.job.logger.warning(task_result_id) + self.job.logger.warning(self.job.job_result.task_kwargs) + self.job.logger.warning(self.job.job_result.task_args) def load_devices(self): """Load device data into a DiffSync model.""" @@ -230,7 +232,7 @@ def load_platforms(self): def load(self): """Load 
network data.""" self._validate_ip_addresses(self.job.ip_addresses) - # self.execute_command_getter() + self.execute_command_getter() self.load_manufacturers() self.load_platforms() self.load_device_types() diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py index f9bee9a5..d1989d8d 100644 --- a/nautobot_device_onboarding/jobs.py +++ b/nautobot_device_onboarding/jobs.py @@ -228,7 +228,6 @@ class Meta: default=False, description="Enable for more verbose logging.", ) - location = ObjectVar( model=Location, query_params={"content_type": "dcim.device"}, @@ -313,8 +312,21 @@ def run(self, dryrun, memory_profiling, *args, **kwargs): # pylint:disable=argu self.timeout = kwargs["timeout"] self.secrets_group = kwargs["secrets_group"] self.platform = kwargs["platform"] - super().run(dryrun, memory_profiling, *args, **kwargs) + print(self.job_result.as_dict()) + + kwargs["location"] = kwargs["location"].id + kwargs["namespace"] = kwargs["namespace"].id + kwargs["device_role"] = kwargs["device_role"].id + kwargs["device_status"] = kwargs["device_status"].id + kwargs["interface_status"] = kwargs["interface_status"].id + kwargs["ip_address_status"] = kwargs["ip_address_status"].id + kwargs["secrets_group"] = kwargs["secrets_group"].id + kwargs["platform"] = self.platform = kwargs["platform"].id if kwargs["platform"] else "" + + self.job_result.task_kwargs = kwargs + + super().run(dryrun, memory_profiling, *args, **kwargs) class SSOTNetworkImporter(DataSource): """Job syncing extended device attributes into Nautobot.""" @@ -342,33 +354,98 @@ class Meta: # pylint: disable=too-few-public-methods has_sensitive_variables = False hidden = False - def __init__(self, *args, **kwargs): - """Initialize Command Getter Job.""" - self.username = None - self.password = None - self.secret = None - self.secrets_group = None - self.ip4address = None - self.platform = None - self.port = None - self.timeout = None - super().__init__(*args, **kwargs) + # def 
__init__(self, *args, **kwargs): + # """Initialize Command Getter Job.""" + # self.username = None + # self.password = None + # self.secret = None + # self.secrets_group = None + # self.ip4address = None + # self.platform = None + # self.port = None + # self.timeout = None + # super().__init__(*args, **kwargs) + + debug = BooleanVar( + default=False, + description="Enable for more verbose logging.", + ) + location = ObjectVar( + model=Location, + query_params={"content_type": "dcim.device"}, + description="Assigned Location for the onboarded device(s)", + ) + namespace = ObjectVar(model=Namespace, description="Namespace ip addresses belong to.") + ip_addresses = StringVar( + description="IP Address of the device to onboard, specify in a comma separated list for multiple devices.", + label="IPv4 Addresses", + ) + management_only_interface = BooleanVar( + default=False, + label="Set Management Only", + description="If True, interfaces that are created or updated will be set to management only. If False, the interface will be set to not be management only.", + ) + update_devices_without_primary_ip = BooleanVar( + default=False, + description="If a device at the specified location already exists in Nautobot but is " + "missing a primary ip address, update it with the sync." 
+ ) + device_role = ObjectVar( + model=Role, + query_params={"content_types": "dcim.device"}, + required=True, + description="Role to be applied to all onboarded devices", + ) + device_status = ObjectVar( + model=Status, + query_params={"content_types": "dcim.device"}, + required=True, + description="Status to be applied to all onboarded devices", + ) + interface_status = ObjectVar( + model=Status, + query_params={"content_types": "dcim.interface"}, + required=True, + description="Status to be applied to all onboarded device interfaces", + ) + ip_address_status = ObjectVar( + model=Status, + query_params={"content_types": "ipam.ipaddress"}, + required=True, + description="Status to be applied to all onboarded ip addresses.", + ) + port = IntegerVar(default=22) + timeout = IntegerVar(default=30) + secrets_group = ObjectVar( + model=SecretsGroup, required=True, description="SecretsGroup for device connection credentials." + ) + platform = ObjectVar( + model=Platform, + required=False, + description="Device platform. 
Define ONLY to override auto-recognition of platform.", + ) - def run(self): - mock_job_data = { - "ip4address": "174.51.52.76", - "platform": "cisco_nxos", - "secrets_group": SecretsGroup.objects.get(name="NW_CREDS"), - "port": 8022, - "timeout": 30, - } + def run(self, *args, **kwargs): + # mock_job_data = { + # "ip4address": "174.51.52.76", + # "platform": "cisco_nxos", + # "secrets_group": SecretsGroup.objects.get(name="NW_CREDS"), + # "port": 8022, + # "timeout": 30, + # } """Process onboarding task from ssot-ni job.""" - self.ip4address = mock_job_data["ip4address"] - self.secrets_group = mock_job_data["secrets_group"] - self.platform = mock_job_data["platform"] - self.port = mock_job_data["port"] - self.timeout = mock_job_data["timeout"] + # self.ip4address = mock_job_data["ip4address"] + # self.secrets_group = mock_job_data["secrets_group"] + # self.platform = mock_job_data["platform"] + # self.port = mock_job_data["port"] + # self.timeout = mock_job_data["timeout"] + + self.ip_addresses = kwargs["ip_addresses"].replace(" ", "").split(",") + self.port = kwargs["port"] + self.timeout = kwargs["timeout"] + self.secrets_group = kwargs["secrets_group"] + self.platform = kwargs["platform"] # Initiate Nornir instance with empty inventory try: @@ -380,8 +457,7 @@ def run(self): inventory={"plugin": "empty-inventory",}, ) as nornir_obj: nr_with_processors = nornir_obj.with_processors([ProcessorDO(logger, compiled_results)]) - ip_address = mock_job_data["ip4address"].split(",") - self.platform = mock_job_data.get("platform", None) + ip_address = self.ip_addresses inventory_constructed = _set_inventory(ip_address, self.platform, self.port, self.secrets_group) nr_with_processors.inventory.hosts.update(inventory_constructed) From 237268caadd32b4e2349ab1975c2070d6a3045a8 Mon Sep 17 00:00:00 2001 From: David Cates Date: Wed, 24 Jan 2024 17:27:36 -0700 Subject: [PATCH 015/225] update ssot integration --- .../diffsync/adapters/onboarding_adapters.py | 171 
+++++++++++------- .../diffsync/models/onboarding_models.py | 137 +++++++++----- nautobot_device_onboarding/jobs.py | 58 +++--- 3 files changed, 231 insertions(+), 135 deletions(-) diff --git a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py index d0193ba3..30711d08 100644 --- a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py @@ -1,34 +1,45 @@ """DiffSync adapters.""" import time + import netaddr from nautobot.dcim.models import Device, DeviceType, Manufacturer, Platform from nautobot.extras.models.jobs import Job as JobModel from nautobot.extras.models.jobs import JobResult from nautobot_device_onboarding.diffsync.models import onboarding_models -from diffsync import DiffSync +import diffsync ####################################### # FOR TESTING ONLY - TO BE REMOVED # ####################################### mock_data = { - "10.1.1.11": { - "hostname": "demo-cisco-xe", - "serial": "9ABUXU580QS", + "10.1.1.15": { + "hostname": "demo-cisco-xe1", + "serial": "9ABUXU581111", + "device_type": "CSR1000V17", + "mgmt_interface": "GigabitEthernet20", + "manufacturer": "Cisco", + "platform": "IOS-test", + "network_driver": "cisco_ios", + "mask_length": 16, + }, + "200.1.1.13": { + "hostname": "demo-cisco-xe2", + "serial": "9ABUXU5882222", "device_type": "CSR1000V2", - "mgmt_interface": "GigabitEthernet1", + "mgmt_interface": "GigabitEthernet16", "manufacturer": "Cisco", "platform": "IOS", "network_driver": "cisco_ios", "mask_length": 24, - } + }, } ####################################### ####################################### -class OnboardingNautobotAdapter(DiffSync): +class OnboardingNautobotAdapter(diffsync.DiffSync): """Adapter for loading Nautobot data.""" manufacturer = onboarding_models.OnboardingManufacturer @@ -85,10 +96,13 @@ def load_devices(self): if self.job.debug: 
self.job.logger.debug(f"Loading Device data from Nautobot...") - for device in Device.objects.filter(primary_ip4__host__in=self.job.ip_addresses): + # for device in Device.objects.filter(primary_ip4__host__in=self.job.ip_addresses): + for device in Device.objects.all(): interface_list = list() + # Only interfaces with the device's primeary ip should be considered for diff calculations for interface in device.interfaces.all(): - interface_list.append(interface.name) + if device.primary_ip4 in interface.ip_addresses.all(): + interface_list.append(interface.name) onboarding_device = self.device( diffsync=self, @@ -102,12 +116,12 @@ def load_devices(self): status__name=device.status.name, secrets_group__name=device.secrets_group.name if device.secrets_group else "", interfaces=interface_list, - mask_length=device.primary_ip4.mask_length if device.primary_ip4 else "", + mask_length=device.primary_ip4.mask_length if device.primary_ip4 else None, serial=device.serial, ) self.add(onboarding_device) if self.job.debug: - self.job.logger.debug(f"Platform: {device.name} loaded.") + self.job.logger.debug(f"Device: {device.name} loaded.") def load(self): """Load nautobot data.""" @@ -117,7 +131,7 @@ def load(self): self.load_devices() -class OnboardingNetworkAdapter(DiffSync): +class OnboardingNetworkAdapter(diffsync.DiffSync): """Adapter for loading device data from a network.""" device_data = None @@ -151,19 +165,76 @@ def _validate_ip_addresses(self, ip_addresses): raise netaddr.AddrConversionError def execute_command_getter(self): - command_getter_job = JobModel.objects.get(name="Command Getter for Device Onboarding").job_task - result = command_getter_job.s() - task_result_id = result.apply_async( - args=self.job.job_result.task_args, - kwargs=self.job.job_result.task_kwargs, - **self.job.job_result.celery_kwargs, - ) - time.sleep(15) - job_result = JobResult.objects.get(id=str(task_result_id)) - self.job.logger.warning(job_result.result) - 
self.job.logger.warning(task_result_id) - self.job.logger.warning(self.job.job_result.task_kwargs) - self.job.logger.warning(self.job.job_result.task_args) + if self.job.platform: + if not self.job.platform.network_driver: + self.job.logger.error( + f"The selected platform, {self.job.platform} " + "does not have a network driver, please update the Platform." + ) + raise Exception("Platform.network_driver missing") + try: + command_getter_job = JobModel.objects.get(name="Command Getter for Device Onboarding").job_task + result = command_getter_job.s() + task_result_id = result.apply_async( + args=self.job.job_result.task_args, + kwargs=self.job.job_result.task_kwargs, + **self.job.job_result.celery_kwargs, + ) + time.sleep(15) + job_result = JobResult.objects.get(id=str(task_result_id)) + self.job.logger.warning(job_result.result) + self.job.logger.warning(task_result_id) + self.job.logger.warning(self.job.job_result.task_kwargs) + self.job.logger.warning(self.job.job_result.task_args) + except JobResult.DoesNotExist: + self.logger.error("The CommandGetterDO job failed to return the expected result") + raise JobResult.DoesNotExist + + def load_manufacturers(self): + """Load manufacturer data into a DiffSync model.""" + for ip_address in self.device_data: + if self.job.debug: + self.job.logger.debug(f"loading manufacturer data for {ip_address}") + onboarding_manufacturer = self.manufacturer( + diffsync=self, + name=self.device_data[ip_address]["manufacturer"], + ) + try: + self.add(onboarding_manufacturer) + except diffsync.ObjectAlreadyExists: + pass + + def load_platforms(self): + """Load platform data into a DiffSync model.""" + for ip_address in self.device_data: + if self.job.debug: + self.job.logger.debug(f"loading platform data for {ip_address}") + onboarding_platform = self.platform( + diffsync=self, + name=self.device_data[ip_address]["platform"], + manufacturer__name=self.device_data[ip_address]["manufacturer"], + 
network_driver=self.device_data[ip_address]["network_driver"], + ) + try: + self.add(onboarding_platform) + except diffsync.ObjectAlreadyExists: + pass + + def load_device_types(self): + """Load device type data into a DiffSync model.""" + for ip_address in self.device_data: + if self.job.debug: + self.job.logger.debug(f"loading device_type data for {ip_address}") + onboarding_device_type = self.device_type( + diffsync=self, + model=self.device_data[ip_address]["device_type"], + part_number=self.device_data[ip_address]["device_type"], + manufacturer__name=self.device_data[ip_address]["manufacturer"], + ) + try: + self.add(onboarding_device_type) + except diffsync.ObjectAlreadyExists: + pass def load_devices(self): """Load device data into a DiffSync model.""" @@ -189,45 +260,17 @@ def load_devices(self): mask_length=self.device_data[ip_address]["mask_length"], serial=self.device_data[ip_address]["serial"], ) - self.add(onboarding_device) - - def load_device_types(self): - """Load device type data into a DiffSync model.""" - for ip_address in self.device_data: - if self.job.debug: - self.job.logger.debug(f"loading device_type data for {ip_address}") - onboarding_device_type = self.device_type( - diffsync=self, - model=self.device_data[ip_address]["device_type"], - part_number=self.device_data[ip_address]["device_type"], - manufacturer__name=self.device_data[ip_address]["manufacturer"], - ) - self.add(onboarding_device_type) - - def load_manufacturers(self): - """Load manufacturer data into a DiffSync model.""" - for ip_address in self.device_data: - if self.job.debug: - self.job.logger.debug(f"loading manufacturer data for {ip_address}") - onboarding_manufacturer = self.manufacturer( - diffsync=self, - name=self.device_data[ip_address]["manufacturer"], - ) - self.add(onboarding_manufacturer) - - def load_platforms(self): - """Load platform data into a DiffSync model.""" - for ip_address in self.device_data: - if self.job.debug: - self.job.logger.debug(f"loading 
platform data for {ip_address}") - onboarding_platform = self.platform( - diffsync=self, - name=self.device_data[ip_address]["platform"], - manufacturer__name=self.device_data[ip_address]["manufacturer"], - network_driver=self.device_data[ip_address]["network_driver"], - ) - self.add(onboarding_platform) - + try: + self.add(onboarding_device) + if self.job.debug: + self.job.logger.debug(f"Device: {self.device_data[ip_address]['hostname']} loaded.") + except diffsync.ObjectAlreadyExists: + self.job.logger.error( + f"Device: {self.device_data[ip_address]['hostname']} has already been loaded! " + f"Duplicate devices will not be synced. " + f"[Serial Number: {self.device_data[ip_address]['serial']}, " + f"IP Address: {ip_address}]" + ) def load(self): """Load network data.""" diff --git a/nautobot_device_onboarding/diffsync/models/onboarding_models.py b/nautobot_device_onboarding/diffsync/models/onboarding_models.py index fee53941..b310408b 100644 --- a/nautobot_device_onboarding/diffsync/models/onboarding_models.py +++ b/nautobot_device_onboarding/diffsync/models/onboarding_models.py @@ -4,12 +4,12 @@ from typing import List, Optional import netaddr -from django.core.exceptions import ObjectDoesNotExist, ValidationError +from django.core.exceptions import MultipleObjectsReturned, ObjectDoesNotExist, ValidationError from nautobot.apps.choices import InterfaceTypeChoices, PrefixTypeChoices from nautobot.dcim.models import Device, DeviceType, Interface, Manufacturer, Platform from nautobot.extras.models import Role, SecretsGroup, Status from nautobot.ipam.models import IPAddress, Prefix -from nautobot_ssot.contrib import NautobotModel +from nautobot_ssot.contrib import NautobotModel from diffsync import DiffSyncModel @@ -17,7 +17,11 @@ class OnboardingDevice(DiffSyncModel): _modelname = "device" # _model = Device - _identifiers = ("location__name", "name", "serial",) + _identifiers = ( + "location__name", + "name", + "serial", + ) _attributes = ( "device_type__model", 
"mask_length", @@ -53,28 +57,33 @@ def _get_or_create_device(cls, platform, diffsync, ids, attrs): """Attempt to get a Device, create a new one if necessary.""" device = None try: - # Only Devices with a primary ip address are loaded from Nautobot when syncing. + # Only Devices with a primary ip address are loaded from Nautobot when syncing. # If a device is found in Nautobot with a matching name and location as the - # device being created, but does not have a primary ip address, it will need + # device being created, but does not have a primary ip address, it will need # to be updated or skipped based on user preference. - device = Device.objects.get(name=ids["name"], location=diffsync.job.location) + device = Device.objects.get( + name=ids["name"], + location=diffsync.job.location, + ) if diffsync.job.update_devices_without_primary_ip: diffsync.job.logger.warning( - f"Device {ids['name']} already exists in Nautobot but does not have a primary " - "IP Address, this device will be udpated.") - # attrs.update(ids) - diffsync.job.logger.warning(attrs) - #TODO: update matched device! + f"Device {ids['name']} at location {diffsync.job.location} already exists in Nautobot " + "but the primary ip address either does not exist, or doesn't match an entered ip address. " + "This device will be updated." + ) + device = cls._update_device_with_attrs(device, platform, ids, attrs, diffsync) else: diffsync.job.logger.warning( - f"Device {ids['name']} already exists in Nautobot but does not have a primary " - "IP Address, this device will be skipped.") - return device - + f"Device {ids['name']} at location {diffsync.job.location} already exists in Nautobot " + "but the primary ip address either does not exist, or doesn't match an entered ip address. " + "IP Address, this device will be skipped." 
+ ) + return None + except ObjectDoesNotExist: # Create Device - device = Device.objects.create( + device = Device( location=diffsync.job.location, status=diffsync.job.device_status, role=diffsync.job.device_role, @@ -82,8 +91,9 @@ def _get_or_create_device(cls, platform, diffsync, ids, attrs): name=ids["name"], platform=platform, secrets_group=diffsync.job.secrets_group, - serial=attrs["serial"] + serial=ids["serial"], ) + device.validated_save() return device @classmethod @@ -92,7 +102,8 @@ def _get_or_create_ip_address(cls, diffsync, attrs): ip_address = None try: ip_address = IPAddress.objects.get( - address=f"{attrs['primary_ip4__host']}/{attrs['mask_length']}", + host=attrs["primary_ip4__host"], + mask_length=attrs["mask_length"], parent__namespace=diffsync.job.namespace, ) except ObjectDoesNotExist: @@ -126,7 +137,7 @@ def _get_or_create_ip_address(cls, diffsync, attrs): parent=prefix, ) return ip_address - + @classmethod def _get_or_create_interface(cls, diffsync, device, attrs): """Attempt to get a Device Interface, create a new one if necessary.""" @@ -149,6 +160,19 @@ def _get_or_create_interface(cls, diffsync, device, attrs): diffsync.job.logger.error(f"Device Interface could not be created, {err}") return device_interface + @classmethod + def _update_device_with_attrs(cls, device, platform, ids, attrs, diffsync): + """Update a Nautobot device instance with all the values in the diffsync model ids and attrs.""" + device.location = diffsync.job.location + device.status = diffsync.job.device_status + device.role = diffsync.job.device_role + device.device_type = DeviceType.objects.get(model=attrs["device_type__model"]) + device.platform = platform + device.secrets_group = diffsync.job.secrets_group + device.serial = ids["serial"] + + return device + @classmethod def create(cls, diffsync, ids, attrs): """Create a new nautobot device using data scraped from a device.""" @@ -158,32 +182,53 @@ def create(cls, diffsync, ids, attrs): platform = 
diffsync.job.platform else: platform = Platform.objects.get(name=attrs["platform__name"]) - + # Get or create Device, Interface and IP Address device = cls._get_or_create_device(platform, diffsync, ids, attrs) - ip_address = cls._get_or_create_ip_address(diffsync=diffsync, attrs=attrs) - interface = cls._get_or_create_interface(diffsync=diffsync, device=device, attrs=attrs) - interface.ip_addresses.add(ip_address) - interface.validated_save() + if device: + ip_address = cls._get_or_create_ip_address(diffsync=diffsync, attrs=attrs) + interface = cls._get_or_create_interface(diffsync=diffsync, device=device, attrs=attrs) + interface.ip_addresses.add(ip_address) + interface.validated_save() - # Assign primary IP Address to Device - try: + # Assign primary IP Address to Device device.primary_ip4 = ip_address - device.validated_save() - except ValidationError as err: - diffsync.job.logger.error( - f"Failed to save changes to {attrs['primary_ip4__host']} Device: {ids['name']}, {err}" - ) + + try: + device.validated_save() + except ValidationError as err: + diffsync.job.logger.error(f"Failed to create or update Device: {ids['name']}, {err}") + raise ValidationError(err) + else: + diffsync.job.logger.error(f"Failed create or update Device: {ids['name']}") return DiffSyncModel.create(diffsync=diffsync, ids=ids, attrs=attrs) def update(self, attrs): """Update an existing nautobot device using data scraped from a device.""" device = Device.objects.get(name=self.name, location__name=self.location__name) - self._update_device_attributes(device=device, attrs=attrs) + + # Update the interface management only setting to reflect the form input + try: + interface = Interface.objects.get( + device=device, + ip_addresses__in=[device.primary_ip4], + name=self.get_attrs()["interfaces"][0], + ) + if interface.mgmt_only is not self.diffsync.job.management_only_interface: + interface.mgmt_only = self.diffsync.job.management_only_interface + interface.validated_save() + 
self.diffsync.job.logger.info( + f"Device: {device.name}, Interface: {interface.name}, " + f"Management Only set to {self.diffsync.job.management_only_interface}" + ) + except Exception as err: + self.diffsync.job.logger.error( + f"Unable to update the management only setting on device {device.name}, {err}" + ) if self.diffsync.job.debug: - self.diffsync.job.logger.debug(f"Updating device with attrs: {attrs}") + self.diffsync.job.logger.debug(f"Updating {device.name} with attrs: {attrs}") if attrs.get("device_type__model"): device.device_type = DeviceType.objects.get(model=attrs.get("device_type__model")) if attrs.get("platform__name"): @@ -214,39 +259,43 @@ def update(self, attrs): interface.validated_save() # Update the interface only else: - # Check for a device with a matching IP Address and remove it before assigning - # the IP Address to the new interface + # Check for an interface with a matching IP Address and remove it before + # assigning the IP Address to the new interface try: old_interface = Interface.objects.get( device=device, - ip_addresses__in=[device.primary_ip4] + ip_addresses__in=[device.primary_ip4], ) old_interface.ip_addresses.remove(device.primary_ip4) - old_interface.validated_save() interface.ip_addresses.add(device.primary_ip4) interface.validated_save() + except MultipleObjectsReturned: + self.diffsync.job.logger.warning( + f"{device.primary_ip4} is assigned to multiple interfaces. A new " + "interface will be created and assigned this IP Address, but the " + "duplicate assignments will remain." 
+ ) except ObjectDoesNotExist: interface.ip_addresses.add(device.primary_ip4) interface.validated_save() - # Update the primary ip address only else: + # Update the primary ip address only + # The OnboardingNautobotAdapter filters out devices without a primary ip4, + # so this will not be called unless the adapter is changed to include all devices if attrs.get("primary_ip4__host"): if not attrs.get("mask_length"): attrs["mask_length"] = device.primary_ip4.mask_length ip_address = self._get_or_create_ip_address(diffsync=self.diffsync, attrs=attrs) interface = Interface.objects.get( - device=device, - ip_addresses__in=[device.primary_ip4] - ) - interface.ip_addresses.remove(device.primary_ip4) #TODO: This is not removing the IP from the interface as expected - interface.validated_save() + device=device, ip_addresses__in=[device.primary_ip4], name=self.get_attrs()["interfaces"][0] + ) interface.ip_addresses.add(ip_address) interface.validated_save() device.primary_ip4 = ip_address try: device.validated_save() except ValidationError as err: - self.diffsync.job.logger.error(f"Device {self.name} failed to update, {err}") + self.diffsync.job.logger.error(f"Device {device.name} failed to update, {err}") return super().update(attrs) diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py index d1989d8d..48031ba0 100644 --- a/nautobot_device_onboarding/jobs.py +++ b/nautobot_device_onboarding/jobs.py @@ -235,9 +235,11 @@ class Meta: ) namespace = ObjectVar(model=Namespace, description="Namespace ip addresses belong to.") ip_addresses = StringVar( - description="IP Address of the device to onboard, specify in a comma separated list for multiple devices.", - label="IPv4 Addresses", + description="IP address of the device to onboard, specify in a comma separated list for multiple devices.", + label="IPv4 addresses", ) + port = IntegerVar(default=22) + timeout = IntegerVar(default=30) management_only_interface = BooleanVar( default=False, label="Set 
Management Only", @@ -245,35 +247,34 @@ class Meta: ) update_devices_without_primary_ip = BooleanVar( default=False, - description="If a device at the specified location already exists in Nautobot but is " - "missing a primary ip address, update it with the sync." + description="If a device at the specified location already exists in Nautobot but the primary ip address " + "does not match an ip address entered, update this device with the sync." ) device_role = ObjectVar( model=Role, query_params={"content_types": "dcim.device"}, required=True, - description="Role to be applied to all onboarded devices", + description="Role to be applied to all new onboarded devices", ) device_status = ObjectVar( model=Status, query_params={"content_types": "dcim.device"}, required=True, - description="Status to be applied to all onboarded devices", + description="Status to be applied to all new onboarded devices", ) interface_status = ObjectVar( model=Status, query_params={"content_types": "dcim.interface"}, required=True, - description="Status to be applied to all onboarded device interfaces", + description="Status to be applied to all new onboarded device interfaces", ) ip_address_status = ObjectVar( + label="IP address status", model=Status, query_params={"content_types": "ipam.ipaddress"}, required=True, - description="Status to be applied to all onboarded ip addresses.", + description="Status to be applied to all new onboarded IP addresses.", ) - port = IntegerVar(default=22) - timeout = IntegerVar(default=30) secrets_group = ObjectVar( model=SecretsGroup, required=True, description="SecretsGroup for device connection credentials." 
) @@ -313,19 +314,21 @@ def run(self, dryrun, memory_profiling, *args, **kwargs): # pylint:disable=argu self.secrets_group = kwargs["secrets_group"] self.platform = kwargs["platform"] - print(self.job_result.as_dict()) - - kwargs["location"] = kwargs["location"].id - kwargs["namespace"] = kwargs["namespace"].id - kwargs["device_role"] = kwargs["device_role"].id - kwargs["device_status"] = kwargs["device_status"].id - kwargs["interface_status"] = kwargs["interface_status"].id - kwargs["ip_address_status"] = kwargs["ip_address_status"].id - kwargs["secrets_group"] = kwargs["secrets_group"].id - kwargs["platform"] = self.platform = kwargs["platform"].id if kwargs["platform"] else "" + nautobot_object_models = [ + "location", + "namespace", + "device_role", + "device_status", + "interface_status", + "ip_address_status", + "secrets_group", + "platform", + ] + # Convert model instances into IDs, necessary for sending form inputs to the worker for use in other jobs + for model in nautobot_object_models: + kwargs[model] = kwargs[model].id if kwargs[model] else None self.job_result.task_kwargs = kwargs - super().run(dryrun, memory_profiling, *args, **kwargs) class SSOTNetworkImporter(DataSource): @@ -387,32 +390,33 @@ class Meta: # pylint: disable=too-few-public-methods ) update_devices_without_primary_ip = BooleanVar( default=False, - description="If a device at the specified location already exists in Nautobot but is " - "missing a primary ip address, update it with the sync." + description="If a device at the specified location already exists in Nautobot but the primary ip address " + "does not match an ip address entered, update this device with the sync." 
) device_role = ObjectVar( model=Role, query_params={"content_types": "dcim.device"}, required=True, - description="Role to be applied to all onboarded devices", + description="Role to be applied to all new onboarded devices", ) device_status = ObjectVar( model=Status, query_params={"content_types": "dcim.device"}, required=True, - description="Status to be applied to all onboarded devices", + description="Status to be applied to all new onboarded devices", ) interface_status = ObjectVar( model=Status, query_params={"content_types": "dcim.interface"}, required=True, - description="Status to be applied to all onboarded device interfaces", + description="Status to be applied to all new onboarded device interfaces", ) ip_address_status = ObjectVar( + label="IP address status", model=Status, query_params={"content_types": "ipam.ipaddress"}, required=True, - description="Status to be applied to all onboarded ip addresses.", + description="Status to be applied to all new onboarded IP addresses.", ) port = IntegerVar(default=22) timeout = IntegerVar(default=30) From c5985df817ac9334a207488c47d4d180332c983f Mon Sep 17 00:00:00 2001 From: David Cates Date: Thu, 25 Jan 2024 12:29:13 -0700 Subject: [PATCH 016/225] update ssot integration --- .../diffsync/adapters/onboarding_adapters.py | 41 +++++++++---------- 1 file changed, 20 insertions(+), 21 deletions(-) diff --git a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py index 30711d08..c7d82145 100644 --- a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py @@ -3,10 +3,10 @@ import time import netaddr +from nautobot.extras.models import JobResult, Job from nautobot.dcim.models import Device, DeviceType, Manufacturer, Platform -from nautobot.extras.models.jobs import Job as JobModel -from nautobot.extras.models.jobs import JobResult from 
nautobot_device_onboarding.diffsync.models import onboarding_models +from nautobot.apps.choices import JobResultStatusChoices import diffsync @@ -14,7 +14,7 @@ # FOR TESTING ONLY - TO BE REMOVED # ####################################### mock_data = { - "10.1.1.15": { + "10.1.1.8": { "hostname": "demo-cisco-xe1", "serial": "9ABUXU581111", "device_type": "CSR1000V17", @@ -24,7 +24,7 @@ "network_driver": "cisco_ios", "mask_length": 16, }, - "200.1.1.13": { + "10.1.1.9": { "hostname": "demo-cisco-xe2", "serial": "9ABUXU5882222", "device_type": "CSR1000V2", @@ -172,23 +172,22 @@ def execute_command_getter(self): "does not have a network driver, please update the Platform." ) raise Exception("Platform.network_driver missing") - try: - command_getter_job = JobModel.objects.get(name="Command Getter for Device Onboarding").job_task - result = command_getter_job.s() - task_result_id = result.apply_async( - args=self.job.job_result.task_args, - kwargs=self.job.job_result.task_kwargs, - **self.job.job_result.celery_kwargs, - ) - time.sleep(15) - job_result = JobResult.objects.get(id=str(task_result_id)) - self.job.logger.warning(job_result.result) - self.job.logger.warning(task_result_id) - self.job.logger.warning(self.job.job_result.task_kwargs) - self.job.logger.warning(self.job.job_result.task_args) - except JobResult.DoesNotExist: - self.logger.error("The CommandGetterDO job failed to return the expected result") - raise JobResult.DoesNotExist + + command_getter_job = Job.objects.get(name="Command Getter for Device Onboarding") + result = JobResult.enqueue_job( + job_model=command_getter_job, + user=self.job.user, + celery_kwargs=self.job.job_result.celery_kwargs, + *self.job.job_result.task_args, + **self.job.job_result.task_kwargs + ) + while True: + if result.status not in JobResultStatusChoices.READY_STATES: + time.sleep(5) + result.refresh_from_db() + else: + break + self.device_data = result.result def load_manufacturers(self): """Load manufacturer data into a 
DiffSync model.""" From 28d37e4f0a5f71f4d8e25b4c20e73f5848ce01ed Mon Sep 17 00:00:00 2001 From: Susan Hooks Date: Thu, 25 Jan 2024 15:35:34 -0700 Subject: [PATCH 017/225] wip --- nautobot_device_onboarding/constants.py | 1 + nautobot_device_onboarding/nornir_plays/processor.py | 2 ++ nautobot_device_onboarding/utils/inventory_creator.py | 9 ++++++--- 3 files changed, 9 insertions(+), 3 deletions(-) diff --git a/nautobot_device_onboarding/constants.py b/nautobot_device_onboarding/constants.py index ea2332c2..a469e75e 100644 --- a/nautobot_device_onboarding/constants.py +++ b/nautobot_device_onboarding/constants.py @@ -12,4 +12,5 @@ PLATFORM_COMMAND_MAP = { "cisco_ios": ["show version", "show inventory", "show interfaces"], "cisco_nxos": ["show version", "show inventory", "show interface"], + "cisco_xe": ["show version", "show inventory", "show interfaces"], } diff --git a/nautobot_device_onboarding/nornir_plays/processor.py b/nautobot_device_onboarding/nornir_plays/processor.py index 1a8e9882..96e620ba 100755 --- a/nautobot_device_onboarding/nornir_plays/processor.py +++ b/nautobot_device_onboarding/nornir_plays/processor.py @@ -69,6 +69,8 @@ def subtask_instance_completed(self, task: Task, host: Host, result: MultiResult if host.platform == "cisco_ios": formatted_data = format_ob_data_ios(host, result) + elif host.platform == "cisco_xe": + formatted_data = format_ob_data_ios(host, result) elif host.platform == "cisco_nxos": formatted_data = format_ob_data_nxos(host, result) else: diff --git a/nautobot_device_onboarding/utils/inventory_creator.py b/nautobot_device_onboarding/utils/inventory_creator.py index 3cf61ebb..8ee688fa 100755 --- a/nautobot_device_onboarding/utils/inventory_creator.py +++ b/nautobot_device_onboarding/utils/inventory_creator.py @@ -35,7 +35,7 @@ def _parse_credentials(credentials): return (username, password, secret) -def guess_netmiko_device_type(hostname, username, password): +def guess_netmiko_device_type(hostname, username, password, 
port): """Guess the device type of host, based on Netmiko.""" guessed_device_type = None @@ -46,6 +46,7 @@ def guess_netmiko_device_type(hostname, username, password): "host": hostname, "username": username, "password": password, + "port": port **netmiko_optional_args, } @@ -63,8 +64,10 @@ def _set_inventory(ips, platform, port, secrets_group): inv = {} username, password, secret = _parse_credentials(secrets_group) for host_ip in ips: - if not platform: - platform = guess_netmiko_device_type(host_ip, username, password) + if platform: + platform = platform.network_driver + else: + platform = guess_netmiko_device_type(host_ip, username, password, port) host = Host( name=host_ip, From 4ebb340b450393d2b463852721eaa7ad9d058a08 Mon Sep 17 00:00:00 2001 From: David Cates Date: Fri, 26 Jan 2024 17:34:19 -0700 Subject: [PATCH 018/225] update ssot integration --- .../adapters/network_importer_adapters.py | 51 ++- .../models/network_importer_models.py | 89 +++++ .../diffsync/models/onboarding_models.py | 40 +-- nautobot_device_onboarding/jobs.py | 75 +++- poetry.lock | 326 +++++++++--------- 5 files changed, 384 insertions(+), 197 deletions(-) diff --git a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py index 9c7421ab..d99fe7e5 100644 --- a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py @@ -1,13 +1,54 @@ """DiffSync adapters.""" from nautobot_ssot.contrib import NautobotAdapter +from nautobot_device_onboarding.diffsync.models import network_importer_models +import diffsync -from diffsync import DiffSync +class FilteredNautobotAdapter(NautobotAdapter): + """ + Allow for filtering of data loaded from Nautobot into DiffSync models. -class NetworkImporterNautobotAdapter(NautobotAdapter): - pass + Must be used with FilteredNautobotModel. 
+ """ + + def _load_objects(self, diffsync_model): + """Given a diffsync model class, load a list of models from the database and return them.""" + parameter_names = self._get_parameter_names(diffsync_model) + for database_object in diffsync_model._get_queryset(diffsync=self): + self.job.logger.debug(f"LOADING: Database Object: {database_object}, " + f"Model Name: {diffsync_model._modelname}, " + f"Parameter Names: {parameter_names}") + self._load_single_object(database_object, diffsync_model, parameter_names) -class NetworkImporterNetworkAdapter(DiffSync): - pass +class NetworkImporterNautobotAdapter(FilteredNautobotAdapter): + """Adapter for loading Nautobot data.""" + + device = network_importer_models.NetworkImporterDevice + interface = network_importer_models.NetworkImporterInterface + ip_address = network_importer_models.NetworkImporterIPAddress + + top_level = ["ip_address", "interface", "device"] + + +class NetworkImporterNetworkAdapter(diffsync.DiffSync): + """Adapter for loading Network data.""" + + def __init__(self, *args, job, sync=None, **kwargs): + """Instantiate this class, but do not load data immediately from the local system.""" + super().__init__(*args, **kwargs) + self.job = job + self.sync = sync + + device = network_importer_models.NetworkImporterDevice + interface = network_importer_models.NetworkImporterInterface + ip_address = network_importer_models.NetworkImporterIPAddress + + top_level = ["ip_address", "interface", "device"] + + def load_devices(self): + pass + + def load(self): + self.load_devices() diff --git a/nautobot_device_onboarding/diffsync/models/network_importer_models.py b/nautobot_device_onboarding/diffsync/models/network_importer_models.py index 72f34562..3f33eef7 100644 --- a/nautobot_device_onboarding/diffsync/models/network_importer_models.py +++ b/nautobot_device_onboarding/diffsync/models/network_importer_models.py @@ -1 +1,90 @@ """Diffsync models.""" + +from nautobot_ssot.contrib import NautobotModel +from 
nautobot.dcim.models import Device, Interface +from nautobot.ipam.models import IPAddress +from typing import List, Optional +from diffsync import DiffSync + + +class FilteredNautobotModel(NautobotModel): + """ + Allow for filtering of data loaded from Nautobot into DiffSync models. + + Must be used with FilteredNautobotAdapter. + """ + + @classmethod + def _get_queryset(cls, diffsync: "DiffSync"): + """Get the queryset used to load the models data from Nautobot.""" + parameter_names = list(cls._identifiers) + list(cls._attributes) + # Here we identify any foreign keys (i.e. fields with '__' in them) so that we can load them directly in the + # first query if this function hasn't been overridden. + prefetch_related_parameters = [parameter.split("__")[0] for parameter in parameter_names if "__" in parameter] + qs = cls.get_queryset(diffsync=diffsync) + return qs.prefetch_related(*prefetch_related_parameters) + + @classmethod + def get_queryset(cls, diffsync: "DiffSync"): + """Get the queryset used to load the models data from Nautobot.""" + # Replace return with a filtered queryset. 
+ # Access the job form inputs with diffsync ex: diffsync.job.location.name + return cls._model.objects.all() + + +class NetworkImporterDevice(FilteredNautobotModel): + _modelname = "device" + _model = Device + _identifiers = ( + "location__name", + "name", + "serial", + ) + + name: str + location__name: str + serial: str + + @classmethod + def _get_queryset(cls, diffsync: "DiffSync"): + """Get the queryset used to load the models data from Nautobot.""" + filter = {} + + if diffsync.job.devices: + filter["id__in"] = [device.id for device in diffsync.job.devices] + if diffsync.job.location: + filter["location"] = diffsync.job.location + if diffsync.job.device_role: + filter["role"] = diffsync.job.device_role + if diffsync.job.tag: + filter["tags"] = diffsync.job.tag + filtered_qs = cls._model.objects.filter(**filter) + + if filter: + return filtered_qs + else: + diffsync.job.logger.error("No device filter options were provided, no devices will be synced.") + return cls._model.objects.none() + + +class NetworkImporterInterface(FilteredNautobotModel): + _modelname = "interface" + _model = Interface + _identifiers = ( + "device__name", + "name", + ) + + device__name: str + name: str + +class NetworkImporterIPAddress(FilteredNautobotModel): + _modelname = "ip_address" + _model = IPAddress + _identifiers = ( + "parent__namespace__name", + "host", + ) + + parent__namespace__name: str + host: str diff --git a/nautobot_device_onboarding/diffsync/models/onboarding_models.py b/nautobot_device_onboarding/diffsync/models/onboarding_models.py index b310408b..7e6c60dd 100644 --- a/nautobot_device_onboarding/diffsync/models/onboarding_models.py +++ b/nautobot_device_onboarding/diffsync/models/onboarding_models.py @@ -3,7 +3,6 @@ import ipaddress from typing import List, Optional -import netaddr from django.core.exceptions import MultipleObjectsReturned, ObjectDoesNotExist, ValidationError from nautobot.apps.choices import InterfaceTypeChoices, PrefixTypeChoices from 
nautobot.dcim.models import Device, DeviceType, Interface, Manufacturer, Platform @@ -16,7 +15,6 @@ class OnboardingDevice(DiffSyncModel): _modelname = "device" - # _model = Device _identifiers = ( "location__name", "name", @@ -37,7 +35,7 @@ class OnboardingDevice(DiffSyncModel): name: str location__name: str - serial: Optional[str] + serial: str device_type__model: Optional[str] mask_length: Optional[int] @@ -50,7 +48,6 @@ class OnboardingDevice(DiffSyncModel): status__name: Optional[str] interfaces: Optional[list] - device_type: List["OnboardingDeviceType"] = [] @classmethod def _get_or_create_device(cls, platform, diffsync, ids, attrs): @@ -59,8 +56,8 @@ def _get_or_create_device(cls, platform, diffsync, ids, attrs): try: # Only Devices with a primary ip address are loaded from Nautobot when syncing. # If a device is found in Nautobot with a matching name and location as the - # device being created, but does not have a primary ip address, it will need - # to be updated or skipped based on user preference. + # device being created, but the primary ip address doesn't match an ip address entered, + # the matching device will be updated or skipped based on user preference. 
device = Device.objects.get( name=ids["name"], @@ -162,7 +159,7 @@ def _get_or_create_interface(cls, diffsync, device, attrs): @classmethod def _update_device_with_attrs(cls, device, platform, ids, attrs, diffsync): - """Update a Nautobot device instance with all the values in the diffsync model ids and attrs.""" + """Update a Nautobot device instance.""" device.location = diffsync.job.location device.status = diffsync.job.device_status device.role = diffsync.job.device_role @@ -202,31 +199,12 @@ def create(cls, diffsync, ids, attrs): else: diffsync.job.logger.error(f"Failed create or update Device: {ids['name']}") - return DiffSyncModel.create(diffsync=diffsync, ids=ids, attrs=attrs) + return super().create(diffsync=diffsync, ids=ids, attrs=attrs) def update(self, attrs): """Update an existing nautobot device using data scraped from a device.""" device = Device.objects.get(name=self.name, location__name=self.location__name) - # Update the interface management only setting to reflect the form input - try: - interface = Interface.objects.get( - device=device, - ip_addresses__in=[device.primary_ip4], - name=self.get_attrs()["interfaces"][0], - ) - if interface.mgmt_only is not self.diffsync.job.management_only_interface: - interface.mgmt_only = self.diffsync.job.management_only_interface - interface.validated_save() - self.diffsync.job.logger.info( - f"Device: {device.name}, Interface: {interface.name}, " - f"Management Only set to {self.diffsync.job.management_only_interface}" - ) - except Exception as err: - self.diffsync.job.logger.error( - f"Unable to update the management only setting on device {device.name}, {err}" - ) - if self.diffsync.job.debug: self.diffsync.job.logger.debug(f"Updating {device.name} with attrs: {attrs}") if attrs.get("device_type__model"): @@ -239,8 +217,6 @@ def update(self, attrs): device.status = Status.objects.get(name=attrs.get("status__name")) if attrs.get("secrets_group__name"): device.secrets_group = 
SecretsGroup.objects.get(name=attrs.get("secrets_group__name")) - if attrs.get("primary_ip4__status__name"): - device.primary_ip.status.name = Status.objects.get(name=attrs.get("primary_ip4__status__name")) if attrs.get("serial"): device.primary_ip.serial = attrs.get("serial") @@ -280,8 +256,10 @@ def update(self, attrs): interface.validated_save() else: # Update the primary ip address only - # The OnboardingNautobotAdapter filters out devices without a primary ip4, - # so this will not be called unless the adapter is changed to include all devices + + # The OnboardingNautobotAdapter only loads devices with primary ips matching those + # entered for onboarding. This will not be called unless the adapter is changed to + # include all devices if attrs.get("primary_ip4__host"): if not attrs.get("mask_length"): attrs["mask_length"] = device.primary_ip4.mask_length diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py index 48031ba0..859101ec 100644 --- a/nautobot_device_onboarding/jobs.py +++ b/nautobot_device_onboarding/jobs.py @@ -4,11 +4,11 @@ from diffsync.enum import DiffSyncFlags from django.conf import settings from django.templatetags.static import static -from nautobot.apps.jobs import BooleanVar, IntegerVar, Job, ObjectVar, StringVar +from nautobot.apps.jobs import BooleanVar, IntegerVar, Job, ObjectVar, StringVar, MultiObjectVar from nautobot.core.celery import register_jobs -from nautobot.dcim.models import DeviceType, Location, Platform +from nautobot.dcim.models import DeviceType, Location, Platform, Device from nautobot.extras.choices import SecretsGroupAccessTypeChoices, SecretsGroupSecretTypeChoices -from nautobot.extras.models import Role, SecretsGroup, SecretsGroupAssociation, Status +from nautobot.extras.models import Role, SecretsGroup, SecretsGroupAssociation, Status, Tag from nautobot.ipam.models import Namespace from nautobot_device_onboarding.diffsync.adapters.network_importer_adapters import ( 
NetworkImporterNautobotAdapter, @@ -334,7 +334,10 @@ def run(self, dryrun, memory_profiling, *args, **kwargs): # pylint:disable=argu class SSOTNetworkImporter(DataSource): """Job syncing extended device attributes into Nautobot.""" - debug = BooleanVar(description="Enable for more verbose logging.") + def __init__(self): + """Initialize SSOTDeviceOnboarding.""" + super().__init__() + self.diffsync_flags = DiffSyncFlags.SKIP_UNMATCHED_DST class Meta: """Metadata about this Job.""" @@ -345,6 +348,68 @@ class Meta: "including Interfaces, IPAddresses, Prefixes, Vlans and Cables." ) + debug = BooleanVar(description="Enable for more verbose logging.") + + devices = MultiObjectVar( + model=Device, + required=False, + description="Device(s) to update.", + ) + location = ObjectVar( + model=Location, + query_params={"content_type": "dcim.device"}, + required=False, + description="Only update devices at a specific location.", + ) + device_role = ObjectVar( + model=Role, + query_params={"content_types": "dcim.device"}, + required=False, + description="Only update devices with the selected role.", + ) + tag = ObjectVar( + model=Tag, + query_params={"content_types": "dcim.device"}, + required=False, + description="Only update devices with the selected tag.", + ) + + def load_source_adapter(self): + """Load onboarding network adapter.""" + self.source_adapter = NetworkImporterNetworkAdapter(job=self, sync=self.sync) + self.source_adapter.load() + + def load_target_adapter(self): + """Load onboarding Nautobot adapter.""" + self.target_adapter = NetworkImporterNautobotAdapter(job=self, sync=self.sync) + self.target_adapter.load() + + def run(self, dryrun, memory_profiling, *args, **kwargs): # pylint:disable=arguments-differ + """Run sync.""" + + self.dryrun = dryrun + self.memory_profiling = memory_profiling + self.debug = kwargs["debug"] + self.location = kwargs["location"] + self.devices = kwargs["devices"] + self.device_role = kwargs["device_role"] + self.tag = kwargs["tag"] + + 
nautobot_object_models = [ + "location", + "devices", + "device_role", + "device_role", + "tag", + ] + # Convert model instances into IDs, necessary for sending form inputs to the worker for use in other jobs + # TODO: MultiObjectVars need to be converted to ids for transver to the command getter + # for model in nautobot_object_models: + # kwargs[model] = kwargs[model].id if kwargs[model] else None + + self.job_result.task_kwargs = kwargs + super().run(dryrun, memory_profiling, *args, **kwargs) + class CommandGetterDO(Job): """Simple Job to Execute Show Command.""" @@ -478,5 +543,5 @@ def run(self, *args, **kwargs): return compiled_results -jobs = [OnboardingTask, SSOTDeviceOnboarding, CommandGetterDO] +jobs = [OnboardingTask, SSOTDeviceOnboarding, SSOTNetworkImporter, CommandGetterDO] register_jobs(*jobs) diff --git a/poetry.lock b/poetry.lock index 0d1a35b1..427076dd 100644 --- a/poetry.lock +++ b/poetry.lock @@ -29,7 +29,6 @@ files = [ name = "anyio" version = "4.2.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -147,24 +146,24 @@ tzdata = ["tzdata"] [[package]] name = "bandit" -version = "1.7.6" +version = "1.7.7" description = "Security oriented static analyser for python code." 
optional = false python-versions = ">=3.8" files = [ - {file = "bandit-1.7.6-py3-none-any.whl", hash = "sha256:36da17c67fc87579a5d20c323c8d0b1643a890a2b93f00b3d1229966624694ff"}, - {file = "bandit-1.7.6.tar.gz", hash = "sha256:72ce7bc9741374d96fb2f1c9a8960829885f1243ffde743de70a19cee353e8f3"}, + {file = "bandit-1.7.7-py3-none-any.whl", hash = "sha256:17e60786a7ea3c9ec84569fd5aee09936d116cb0cb43151023258340dbffb7ed"}, + {file = "bandit-1.7.7.tar.gz", hash = "sha256:527906bec6088cb499aae31bc962864b4e77569e9d529ee51df3a93b4b8ab28a"}, ] [package.dependencies] colorama = {version = ">=0.3.9", markers = "platform_system == \"Windows\""} -GitPython = ">=3.1.30" PyYAML = ">=5.3.1" rich = "*" stevedore = ">=1.20.0" [package.extras] -test = ["beautifulsoup4 (>=4.8.0)", "coverage (>=4.5.4)", "fixtures (>=3.0.0)", "flake8 (>=4.0.0)", "pylint (==1.9.4)", "stestr (>=2.5.0)", "testscenarios (>=0.5.0)", "testtools (>=2.3.0)", "tomli (>=1.1.0)"] +baseline = ["GitPython (>=3.1.30)"] +test = ["beautifulsoup4 (>=4.8.0)", "coverage (>=4.5.4)", "fixtures (>=3.0.0)", "flake8 (>=4.0.0)", "pylint (==1.9.4)", "stestr (>=2.5.0)", "testscenarios (>=0.5.0)", "testtools (>=2.3.0)"] toml = ["tomli (>=1.1.0)"] yaml = ["PyYAML"] @@ -221,33 +220,33 @@ files = [ [[package]] name = "black" -version = "23.12.1" +version = "24.1.0" description = "The uncompromising code formatter." 
optional = false python-versions = ">=3.8" files = [ - {file = "black-23.12.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0aaf6041986767a5e0ce663c7a2f0e9eaf21e6ff87a5f95cbf3675bfd4c41d2"}, - {file = "black-23.12.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c88b3711d12905b74206227109272673edce0cb29f27e1385f33b0163c414bba"}, - {file = "black-23.12.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a920b569dc6b3472513ba6ddea21f440d4b4c699494d2e972a1753cdc25df7b0"}, - {file = "black-23.12.1-cp310-cp310-win_amd64.whl", hash = "sha256:3fa4be75ef2a6b96ea8d92b1587dd8cb3a35c7e3d51f0738ced0781c3aa3a5a3"}, - {file = "black-23.12.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8d4df77958a622f9b5a4c96edb4b8c0034f8434032ab11077ec6c56ae9f384ba"}, - {file = "black-23.12.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:602cfb1196dc692424c70b6507593a2b29aac0547c1be9a1d1365f0d964c353b"}, - {file = "black-23.12.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c4352800f14be5b4864016882cdba10755bd50805c95f728011bcb47a4afd59"}, - {file = "black-23.12.1-cp311-cp311-win_amd64.whl", hash = "sha256:0808494f2b2df923ffc5723ed3c7b096bd76341f6213989759287611e9837d50"}, - {file = "black-23.12.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:25e57fd232a6d6ff3f4478a6fd0580838e47c93c83eaf1ccc92d4faf27112c4e"}, - {file = "black-23.12.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2d9e13db441c509a3763a7a3d9a49ccc1b4e974a47be4e08ade2a228876500ec"}, - {file = "black-23.12.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d1bd9c210f8b109b1762ec9fd36592fdd528485aadb3f5849b2740ef17e674e"}, - {file = "black-23.12.1-cp312-cp312-win_amd64.whl", hash = "sha256:ae76c22bde5cbb6bfd211ec343ded2163bba7883c7bc77f6b756a1049436fbb9"}, - {file = "black-23.12.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1fa88a0f74e50e4487477bc0bb900c6781dbddfdfa32691e780bf854c3b4a47f"}, - {file = 
"black-23.12.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a4d6a9668e45ad99d2f8ec70d5c8c04ef4f32f648ef39048d010b0689832ec6d"}, - {file = "black-23.12.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b18fb2ae6c4bb63eebe5be6bd869ba2f14fd0259bda7d18a46b764d8fb86298a"}, - {file = "black-23.12.1-cp38-cp38-win_amd64.whl", hash = "sha256:c04b6d9d20e9c13f43eee8ea87d44156b8505ca8a3c878773f68b4e4812a421e"}, - {file = "black-23.12.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3e1b38b3135fd4c025c28c55ddfc236b05af657828a8a6abe5deec419a0b7055"}, - {file = "black-23.12.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4f0031eaa7b921db76decd73636ef3a12c942ed367d8c3841a0739412b260a54"}, - {file = "black-23.12.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97e56155c6b737854e60a9ab1c598ff2533d57e7506d97af5481141671abf3ea"}, - {file = "black-23.12.1-cp39-cp39-win_amd64.whl", hash = "sha256:dd15245c8b68fe2b6bd0f32c1556509d11bb33aec9b5d0866dd8e2ed3dba09c2"}, - {file = "black-23.12.1-py3-none-any.whl", hash = "sha256:78baad24af0f033958cad29731e27363183e140962595def56423e626f4bee3e"}, - {file = "black-23.12.1.tar.gz", hash = "sha256:4ce3ef14ebe8d9509188014d96af1c456a910d5b5cbf434a09fef7e024b3d0d5"}, + {file = "black-24.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:94d5280d020dadfafc75d7cae899609ed38653d3f5e82e7ce58f75e76387ed3d"}, + {file = "black-24.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:aaf9aa85aaaa466bf969e7dd259547f4481b712fe7ee14befeecc152c403ee05"}, + {file = "black-24.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec489cae76eac3f7573629955573c3a0e913641cafb9e3bfc87d8ce155ebdb29"}, + {file = "black-24.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:a5a0100b4bdb3744dd68412c3789f472d822dc058bb3857743342f8d7f93a5a7"}, + {file = "black-24.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6cc5a6ba3e671cfea95a40030b16a98ee7dc2e22b6427a6f3389567ecf1b5262"}, + 
{file = "black-24.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e0e367759062dcabcd9a426d12450c6d61faf1704a352a49055a04c9f9ce8f5a"}, + {file = "black-24.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be305563ff4a2dea813f699daaffac60b977935f3264f66922b1936a5e492ee4"}, + {file = "black-24.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:6a8977774929b5db90442729f131221e58cc5d8208023c6af9110f26f75b6b20"}, + {file = "black-24.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d74d4d0da276fbe3b95aa1f404182562c28a04402e4ece60cf373d0b902f33a0"}, + {file = "black-24.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:39addf23f7070dbc0b5518cdb2018468ac249d7412a669b50ccca18427dba1f3"}, + {file = "black-24.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:827a7c0da520dd2f8e6d7d3595f4591aa62ccccce95b16c0e94bb4066374c4c2"}, + {file = "black-24.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:0cd59d01bf3306ff7e3076dd7f4435fcd2fafe5506a6111cae1138fc7de52382"}, + {file = "black-24.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bf8dd261ee82df1abfb591f97e174345ab7375a55019cc93ad38993b9ff5c6ad"}, + {file = "black-24.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:82d9452aeabd51d1c8f0d52d4d18e82b9f010ecb30fd55867b5ff95904f427ff"}, + {file = "black-24.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9aede09f72b2a466e673ee9fca96e4bccc36f463cac28a35ce741f0fd13aea8b"}, + {file = "black-24.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:780f13d03066a7daf1707ec723fdb36bd698ffa29d95a2e7ef33a8dd8fe43b5c"}, + {file = "black-24.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a15670c650668399c4b5eae32e222728185961d6ef6b568f62c1681d57b381ba"}, + {file = "black-24.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1e0fa70b8464055069864a4733901b31cbdbe1273f63a24d2fa9d726723d45ac"}, + {file = "black-24.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:7fa8d9aaa22d846f8c0f7f07391148e5e346562e9b215794f9101a8339d8b6d8"}, + {file = "black-24.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:f0dfbfbacfbf9cd1fac7a5ddd3e72510ffa93e841a69fcf4a6358feab1685382"}, + {file = "black-24.1.0-py3-none-any.whl", hash = "sha256:5134a6f6b683aa0a5592e3fd61dd3519d8acd953d93e2b8b76f9981245b65594"}, + {file = "black-24.1.0.tar.gz", hash = "sha256:30fbf768cd4f4576598b1db0202413fafea9a227ef808d1a12230c643cefe9fc"}, ] [package.dependencies] @@ -571,63 +570,63 @@ files = [ [[package]] name = "coverage" -version = "7.4.0" +version = "7.4.1" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.8" files = [ - {file = "coverage-7.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36b0ea8ab20d6a7564e89cb6135920bc9188fb5f1f7152e94e8300b7b189441a"}, - {file = "coverage-7.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0676cd0ba581e514b7f726495ea75aba3eb20899d824636c6f59b0ed2f88c471"}, - {file = "coverage-7.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0ca5c71a5a1765a0f8f88022c52b6b8be740e512980362f7fdbb03725a0d6b9"}, - {file = "coverage-7.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7c97726520f784239f6c62506bc70e48d01ae71e9da128259d61ca5e9788516"}, - {file = "coverage-7.4.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:815ac2d0f3398a14286dc2cea223a6f338109f9ecf39a71160cd1628786bc6f5"}, - {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:80b5ee39b7f0131ebec7968baa9b2309eddb35b8403d1869e08f024efd883566"}, - {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5b2ccb7548a0b65974860a78c9ffe1173cfb5877460e5a229238d985565574ae"}, - {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:995ea5c48c4ebfd898eacb098164b3cc826ba273b3049e4a889658548e321b43"}, - 
{file = "coverage-7.4.0-cp310-cp310-win32.whl", hash = "sha256:79287fd95585ed36e83182794a57a46aeae0b64ca53929d1176db56aacc83451"}, - {file = "coverage-7.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:5b14b4f8760006bfdb6e08667af7bc2d8d9bfdb648351915315ea17645347137"}, - {file = "coverage-7.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:04387a4a6ecb330c1878907ce0dc04078ea72a869263e53c72a1ba5bbdf380ca"}, - {file = "coverage-7.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ea81d8f9691bb53f4fb4db603203029643caffc82bf998ab5b59ca05560f4c06"}, - {file = "coverage-7.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74775198b702868ec2d058cb92720a3c5a9177296f75bd97317c787daf711505"}, - {file = "coverage-7.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76f03940f9973bfaee8cfba70ac991825611b9aac047e5c80d499a44079ec0bc"}, - {file = "coverage-7.4.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:485e9f897cf4856a65a57c7f6ea3dc0d4e6c076c87311d4bc003f82cfe199d25"}, - {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6ae8c9d301207e6856865867d762a4b6fd379c714fcc0607a84b92ee63feff70"}, - {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bf477c355274a72435ceb140dc42de0dc1e1e0bf6e97195be30487d8eaaf1a09"}, - {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:83c2dda2666fe32332f8e87481eed056c8b4d163fe18ecc690b02802d36a4d26"}, - {file = "coverage-7.4.0-cp311-cp311-win32.whl", hash = "sha256:697d1317e5290a313ef0d369650cfee1a114abb6021fa239ca12b4849ebbd614"}, - {file = "coverage-7.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:26776ff6c711d9d835557ee453082025d871e30b3fd6c27fcef14733f67f0590"}, - {file = "coverage-7.4.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:13eaf476ec3e883fe3e5fe3707caeb88268a06284484a3daf8250259ef1ba143"}, 
- {file = "coverage-7.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846f52f46e212affb5bcf131c952fb4075b55aae6b61adc9856222df89cbe3e2"}, - {file = "coverage-7.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26f66da8695719ccf90e794ed567a1549bb2644a706b41e9f6eae6816b398c4a"}, - {file = "coverage-7.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:164fdcc3246c69a6526a59b744b62e303039a81e42cfbbdc171c91a8cc2f9446"}, - {file = "coverage-7.4.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:316543f71025a6565677d84bc4df2114e9b6a615aa39fb165d697dba06a54af9"}, - {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bb1de682da0b824411e00a0d4da5a784ec6496b6850fdf8c865c1d68c0e318dd"}, - {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:0e8d06778e8fbffccfe96331a3946237f87b1e1d359d7fbe8b06b96c95a5407a"}, - {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a56de34db7b7ff77056a37aedded01b2b98b508227d2d0979d373a9b5d353daa"}, - {file = "coverage-7.4.0-cp312-cp312-win32.whl", hash = "sha256:51456e6fa099a8d9d91497202d9563a320513fcf59f33991b0661a4a6f2ad450"}, - {file = "coverage-7.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:cd3c1e4cb2ff0083758f09be0f77402e1bdf704adb7f89108007300a6da587d0"}, - {file = "coverage-7.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e9d1bf53c4c8de58d22e0e956a79a5b37f754ed1ffdbf1a260d9dcfa2d8a325e"}, - {file = "coverage-7.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:109f5985182b6b81fe33323ab4707011875198c41964f014579cf82cebf2bb85"}, - {file = "coverage-7.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cc9d4bc55de8003663ec94c2f215d12d42ceea128da8f0f4036235a119c88ac"}, - {file = 
"coverage-7.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cc6d65b21c219ec2072c1293c505cf36e4e913a3f936d80028993dd73c7906b1"}, - {file = "coverage-7.4.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a10a4920def78bbfff4eff8a05c51be03e42f1c3735be42d851f199144897ba"}, - {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b8e99f06160602bc64da35158bb76c73522a4010f0649be44a4e167ff8555952"}, - {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:7d360587e64d006402b7116623cebf9d48893329ef035278969fa3bbf75b697e"}, - {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:29f3abe810930311c0b5d1a7140f6395369c3db1be68345638c33eec07535105"}, - {file = "coverage-7.4.0-cp38-cp38-win32.whl", hash = "sha256:5040148f4ec43644702e7b16ca864c5314ccb8ee0751ef617d49aa0e2d6bf4f2"}, - {file = "coverage-7.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:9864463c1c2f9cb3b5db2cf1ff475eed2f0b4285c2aaf4d357b69959941aa555"}, - {file = "coverage-7.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:936d38794044b26c99d3dd004d8af0035ac535b92090f7f2bb5aa9c8e2f5cd42"}, - {file = "coverage-7.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:799c8f873794a08cdf216aa5d0531c6a3747793b70c53f70e98259720a6fe2d7"}, - {file = "coverage-7.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e7defbb9737274023e2d7af02cac77043c86ce88a907c58f42b580a97d5bcca9"}, - {file = "coverage-7.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a1526d265743fb49363974b7aa8d5899ff64ee07df47dd8d3e37dcc0818f09ed"}, - {file = "coverage-7.4.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf635a52fc1ea401baf88843ae8708591aa4adff875e5c23220de43b1ccf575c"}, - {file = 
"coverage-7.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:756ded44f47f330666843b5781be126ab57bb57c22adbb07d83f6b519783b870"}, - {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:0eb3c2f32dabe3a4aaf6441dde94f35687224dfd7eb2a7f47f3fd9428e421058"}, - {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bfd5db349d15c08311702611f3dccbef4b4e2ec148fcc636cf8739519b4a5c0f"}, - {file = "coverage-7.4.0-cp39-cp39-win32.whl", hash = "sha256:53d7d9158ee03956e0eadac38dfa1ec8068431ef8058fe6447043db1fb40d932"}, - {file = "coverage-7.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:cfd2a8b6b0d8e66e944d47cdec2f47c48fef2ba2f2dff5a9a75757f64172857e"}, - {file = "coverage-7.4.0-pp38.pp39.pp310-none-any.whl", hash = "sha256:c530833afc4707fe48524a44844493f36d8727f04dcce91fb978c414a8556cc6"}, - {file = "coverage-7.4.0.tar.gz", hash = "sha256:707c0f58cb1712b8809ece32b68996ee1e609f71bd14615bd8f87a1293cb610e"}, + {file = "coverage-7.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:077d366e724f24fc02dbfe9d946534357fda71af9764ff99d73c3c596001bbd7"}, + {file = "coverage-7.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0193657651f5399d433c92f8ae264aff31fc1d066deee4b831549526433f3f61"}, + {file = "coverage-7.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d17bbc946f52ca67adf72a5ee783cd7cd3477f8f8796f59b4974a9b59cacc9ee"}, + {file = "coverage-7.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3277f5fa7483c927fe3a7b017b39351610265308f5267ac6d4c2b64cc1d8d25"}, + {file = "coverage-7.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6dceb61d40cbfcf45f51e59933c784a50846dc03211054bd76b421a713dcdf19"}, + {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6008adeca04a445ea6ef31b2cbaf1d01d02986047606f7da266629afee982630"}, + {file = 
"coverage-7.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c61f66d93d712f6e03369b6a7769233bfda880b12f417eefdd4f16d1deb2fc4c"}, + {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b9bb62fac84d5f2ff523304e59e5c439955fb3b7f44e3d7b2085184db74d733b"}, + {file = "coverage-7.4.1-cp310-cp310-win32.whl", hash = "sha256:f86f368e1c7ce897bf2457b9eb61169a44e2ef797099fb5728482b8d69f3f016"}, + {file = "coverage-7.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:869b5046d41abfea3e381dd143407b0d29b8282a904a19cb908fa24d090cc018"}, + {file = "coverage-7.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b8ffb498a83d7e0305968289441914154fb0ef5d8b3157df02a90c6695978295"}, + {file = "coverage-7.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3cacfaefe6089d477264001f90f55b7881ba615953414999c46cc9713ff93c8c"}, + {file = "coverage-7.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d6850e6e36e332d5511a48a251790ddc545e16e8beaf046c03985c69ccb2676"}, + {file = "coverage-7.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18e961aa13b6d47f758cc5879383d27b5b3f3dcd9ce8cdbfdc2571fe86feb4dd"}, + {file = "coverage-7.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dfd1e1b9f0898817babf840b77ce9fe655ecbe8b1b327983df485b30df8cc011"}, + {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6b00e21f86598b6330f0019b40fb397e705135040dbedc2ca9a93c7441178e74"}, + {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:536d609c6963c50055bab766d9951b6c394759190d03311f3e9fcf194ca909e1"}, + {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7ac8f8eb153724f84885a1374999b7e45734bf93a87d8df1e7ce2146860edef6"}, + {file = "coverage-7.4.1-cp311-cp311-win32.whl", hash = 
"sha256:f3771b23bb3675a06f5d885c3630b1d01ea6cac9e84a01aaf5508706dba546c5"}, + {file = "coverage-7.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:9d2f9d4cc2a53b38cabc2d6d80f7f9b7e3da26b2f53d48f05876fef7956b6968"}, + {file = "coverage-7.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f68ef3660677e6624c8cace943e4765545f8191313a07288a53d3da188bd8581"}, + {file = "coverage-7.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:23b27b8a698e749b61809fb637eb98ebf0e505710ec46a8aa6f1be7dc0dc43a6"}, + {file = "coverage-7.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e3424c554391dc9ef4a92ad28665756566a28fecf47308f91841f6c49288e66"}, + {file = "coverage-7.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e0860a348bf7004c812c8368d1fc7f77fe8e4c095d661a579196a9533778e156"}, + {file = "coverage-7.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe558371c1bdf3b8fa03e097c523fb9645b8730399c14fe7721ee9c9e2a545d3"}, + {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3468cc8720402af37b6c6e7e2a9cdb9f6c16c728638a2ebc768ba1ef6f26c3a1"}, + {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:02f2edb575d62172aa28fe00efe821ae31f25dc3d589055b3fb64d51e52e4ab1"}, + {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ca6e61dc52f601d1d224526360cdeab0d0712ec104a2ce6cc5ccef6ed9a233bc"}, + {file = "coverage-7.4.1-cp312-cp312-win32.whl", hash = "sha256:ca7b26a5e456a843b9b6683eada193fc1f65c761b3a473941efe5a291f604c74"}, + {file = "coverage-7.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:85ccc5fa54c2ed64bd91ed3b4a627b9cce04646a659512a051fa82a92c04a448"}, + {file = "coverage-7.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8bdb0285a0202888d19ec6b6d23d5990410decb932b709f2b0dfe216d031d218"}, + {file = 
"coverage-7.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:918440dea04521f499721c039863ef95433314b1db00ff826a02580c1f503e45"}, + {file = "coverage-7.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:379d4c7abad5afbe9d88cc31ea8ca262296480a86af945b08214eb1a556a3e4d"}, + {file = "coverage-7.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b094116f0b6155e36a304ff912f89bbb5067157aff5f94060ff20bbabdc8da06"}, + {file = "coverage-7.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2f5968608b1fe2a1d00d01ad1017ee27efd99b3437e08b83ded9b7af3f6f766"}, + {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:10e88e7f41e6197ea0429ae18f21ff521d4f4490aa33048f6c6f94c6045a6a75"}, + {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a4a3907011d39dbc3e37bdc5df0a8c93853c369039b59efa33a7b6669de04c60"}, + {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6d224f0c4c9c98290a6990259073f496fcec1b5cc613eecbd22786d398ded3ad"}, + {file = "coverage-7.4.1-cp38-cp38-win32.whl", hash = "sha256:23f5881362dcb0e1a92b84b3c2809bdc90db892332daab81ad8f642d8ed55042"}, + {file = "coverage-7.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:a07f61fc452c43cd5328b392e52555f7d1952400a1ad09086c4a8addccbd138d"}, + {file = "coverage-7.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8e738a492b6221f8dcf281b67129510835461132b03024830ac0e554311a5c54"}, + {file = "coverage-7.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:46342fed0fff72efcda77040b14728049200cbba1279e0bf1188f1f2078c1d70"}, + {file = "coverage-7.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9641e21670c68c7e57d2053ddf6c443e4f0a6e18e547e86af3fad0795414a628"}, + {file = "coverage-7.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:aeb2c2688ed93b027eb0d26aa188ada34acb22dceea256d76390eea135083950"}, + {file = "coverage-7.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d12c923757de24e4e2110cf8832d83a886a4cf215c6e61ed506006872b43a6d1"}, + {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0491275c3b9971cdbd28a4595c2cb5838f08036bca31765bad5e17edf900b2c7"}, + {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8dfc5e195bbef80aabd81596ef52a1277ee7143fe419efc3c4d8ba2754671756"}, + {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1a78b656a4d12b0490ca72651fe4d9f5e07e3c6461063a9b6265ee45eb2bdd35"}, + {file = "coverage-7.4.1-cp39-cp39-win32.whl", hash = "sha256:f90515974b39f4dea2f27c0959688621b46d96d5a626cf9c53dbc653a895c05c"}, + {file = "coverage-7.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:64e723ca82a84053dd7bfcc986bdb34af8d9da83c521c19d6b472bc6880e191a"}, + {file = "coverage-7.4.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:32a8d985462e37cfdab611a6f95b09d7c091d07668fdc26e47a725ee575fe166"}, + {file = "coverage-7.4.1.tar.gz", hash = "sha256:1ed4b95480952b1a26d863e546fa5094564aa0065e1e5f0d4d0041f293251d04"}, ] [package.extras] @@ -648,47 +647,56 @@ dev = ["polib"] [[package]] name = "cryptography" -version = "41.0.7" +version = "42.0.1" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
optional = false python-versions = ">=3.7" files = [ - {file = "cryptography-41.0.7-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:3c78451b78313fa81607fa1b3f1ae0a5ddd8014c38a02d9db0616133987b9cdf"}, - {file = "cryptography-41.0.7-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:928258ba5d6f8ae644e764d0f996d61a8777559f72dfeb2eea7e2fe0ad6e782d"}, - {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a1b41bc97f1ad230a41657d9155113c7521953869ae57ac39ac7f1bb471469a"}, - {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:841df4caa01008bad253bce2a6f7b47f86dc9f08df4b433c404def869f590a15"}, - {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5429ec739a29df2e29e15d082f1d9ad683701f0ec7709ca479b3ff2708dae65a"}, - {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:43f2552a2378b44869fe8827aa19e69512e3245a219104438692385b0ee119d1"}, - {file = "cryptography-41.0.7-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:af03b32695b24d85a75d40e1ba39ffe7db7ffcb099fe507b39fd41a565f1b157"}, - {file = "cryptography-41.0.7-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:49f0805fc0b2ac8d4882dd52f4a3b935b210935d500b6b805f321addc8177406"}, - {file = "cryptography-41.0.7-cp37-abi3-win32.whl", hash = "sha256:f983596065a18a2183e7f79ab3fd4c475205b839e02cbc0efbbf9666c4b3083d"}, - {file = "cryptography-41.0.7-cp37-abi3-win_amd64.whl", hash = "sha256:90452ba79b8788fa380dfb587cca692976ef4e757b194b093d845e8d99f612f2"}, - {file = "cryptography-41.0.7-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:079b85658ea2f59c4f43b70f8119a52414cdb7be34da5d019a77bf96d473b960"}, - {file = "cryptography-41.0.7-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:b640981bf64a3e978a56167594a0e97db71c89a479da8e175d8bb5be5178c003"}, - {file = "cryptography-41.0.7-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = 
"sha256:e3114da6d7f95d2dee7d3f4eec16dacff819740bbab931aff8648cb13c5ff5e7"}, - {file = "cryptography-41.0.7-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d5ec85080cce7b0513cfd233914eb8b7bbd0633f1d1703aa28d1dd5a72f678ec"}, - {file = "cryptography-41.0.7-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7a698cb1dac82c35fcf8fe3417a3aaba97de16a01ac914b89a0889d364d2f6be"}, - {file = "cryptography-41.0.7-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:37a138589b12069efb424220bf78eac59ca68b95696fc622b6ccc1c0a197204a"}, - {file = "cryptography-41.0.7-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:68a2dec79deebc5d26d617bfdf6e8aab065a4f34934b22d3b5010df3ba36612c"}, - {file = "cryptography-41.0.7-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:09616eeaef406f99046553b8a40fbf8b1e70795a91885ba4c96a70793de5504a"}, - {file = "cryptography-41.0.7-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:48a0476626da912a44cc078f9893f292f0b3e4c739caf289268168d8f4702a39"}, - {file = "cryptography-41.0.7-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c7f3201ec47d5207841402594f1d7950879ef890c0c495052fa62f58283fde1a"}, - {file = "cryptography-41.0.7-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c5ca78485a255e03c32b513f8c2bc39fedb7f5c5f8535545bdc223a03b24f248"}, - {file = "cryptography-41.0.7-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:d6c391c021ab1f7a82da5d8d0b3cee2f4b2c455ec86c8aebbc84837a631ff309"}, - {file = "cryptography-41.0.7.tar.gz", hash = "sha256:13f93ce9bea8016c253b34afc6bd6a75993e5c40672ed5405a9c832f0d4a00bc"}, -] - -[package.dependencies] -cffi = ">=1.12" + {file = "cryptography-42.0.1-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:265bdc693570b895eb641410b8fc9e8ddbce723a669236162b9d9cfb70bd8d77"}, + {file = "cryptography-42.0.1-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:160fa08dfa6dca9cb8ad9bd84e080c0db6414ba5ad9a7470bc60fb154f60111e"}, + {file = 
"cryptography-42.0.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:727387886c9c8de927c360a396c5edcb9340d9e960cda145fca75bdafdabd24c"}, + {file = "cryptography-42.0.1-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d84673c012aa698555d4710dcfe5f8a0ad76ea9dde8ef803128cc669640a2e0"}, + {file = "cryptography-42.0.1-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:e6edc3a568667daf7d349d7e820783426ee4f1c0feab86c29bd1d6fe2755e009"}, + {file = "cryptography-42.0.1-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:d50718dd574a49d3ef3f7ef7ece66ef281b527951eb2267ce570425459f6a404"}, + {file = "cryptography-42.0.1-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:9544492e8024f29919eac2117edd8c950165e74eb551a22c53f6fdf6ba5f4cb8"}, + {file = "cryptography-42.0.1-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:ab6b302d51fbb1dd339abc6f139a480de14d49d50f65fdc7dff782aa8631d035"}, + {file = "cryptography-42.0.1-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:2fe16624637d6e3e765530bc55caa786ff2cbca67371d306e5d0a72e7c3d0407"}, + {file = "cryptography-42.0.1-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:ed1b2130f5456a09a134cc505a17fc2830a1a48ed53efd37dcc904a23d7b82fa"}, + {file = "cryptography-42.0.1-cp37-abi3-win32.whl", hash = "sha256:e5edf189431b4d51f5c6fb4a95084a75cef6b4646c934eb6e32304fc720e1453"}, + {file = "cryptography-42.0.1-cp37-abi3-win_amd64.whl", hash = "sha256:6bfd823b336fdcd8e06285ae8883d3d2624d3bdef312a0e2ef905f332f8e9302"}, + {file = "cryptography-42.0.1-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:351db02c1938c8e6b1fee8a78d6b15c5ccceca7a36b5ce48390479143da3b411"}, + {file = "cryptography-42.0.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:430100abed6d3652208ae1dd410c8396213baee2e01a003a4449357db7dc9e14"}, + {file = "cryptography-42.0.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:2dff7a32880a51321f5de7869ac9dde6b1fca00fc1fef89d60e93f215468e824"}, + {file = "cryptography-42.0.1-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:b512f33c6ab195852595187af5440d01bb5f8dd57cb7a91e1e009a17f1b7ebca"}, + {file = "cryptography-42.0.1-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:95d900d19a370ae36087cc728e6e7be9c964ffd8cbcb517fd1efb9c9284a6abc"}, + {file = "cryptography-42.0.1-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:6ac8924085ed8287545cba89dc472fc224c10cc634cdf2c3e2866fe868108e77"}, + {file = "cryptography-42.0.1-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:cb2861a9364fa27d24832c718150fdbf9ce6781d7dc246a516435f57cfa31fe7"}, + {file = "cryptography-42.0.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:25ec6e9e81de5d39f111a4114193dbd39167cc4bbd31c30471cebedc2a92c323"}, + {file = "cryptography-42.0.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:9d61fcdf37647765086030d81872488e4cb3fafe1d2dda1d487875c3709c0a49"}, + {file = "cryptography-42.0.1-cp39-abi3-win32.whl", hash = "sha256:16b9260d04a0bfc8952b00335ff54f471309d3eb9d7e8dbfe9b0bd9e26e67881"}, + {file = "cryptography-42.0.1-cp39-abi3-win_amd64.whl", hash = "sha256:7911586fc69d06cd0ab3f874a169433db1bc2f0e40988661408ac06c4527a986"}, + {file = "cryptography-42.0.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:d3594947d2507d4ef7a180a7f49a6db41f75fb874c2fd0e94f36b89bfd678bf2"}, + {file = "cryptography-42.0.1-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:8d7efb6bf427d2add2f40b6e1e8e476c17508fa8907234775214b153e69c2e11"}, + {file = "cryptography-42.0.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:126e0ba3cc754b200a2fb88f67d66de0d9b9e94070c5bc548318c8dab6383cb6"}, + {file = "cryptography-42.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:802d6f83233cf9696b59b09eb067e6b4d5ae40942feeb8e13b213c8fad47f1aa"}, + {file = "cryptography-42.0.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = 
"sha256:0b7cacc142260ada944de070ce810c3e2a438963ee3deb45aa26fd2cee94c9a4"}, + {file = "cryptography-42.0.1-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:32ea63ceeae870f1a62e87f9727359174089f7b4b01e4999750827bf10e15d60"}, + {file = "cryptography-42.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:d3902c779a92151f134f68e555dd0b17c658e13429f270d8a847399b99235a3f"}, + {file = "cryptography-42.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:50aecd93676bcca78379604ed664c45da82bc1241ffb6f97f6b7392ed5bc6f04"}, + {file = "cryptography-42.0.1.tar.gz", hash = "sha256:fd33f53809bb363cf126bebe7a99d97735988d9b0131a2be59fbf83e1259a5b7"}, +] + +[package.dependencies] +cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} [package.extras] docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] -docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"] +docstest = ["pyenchant (>=1.6.11)", "readme-renderer", "sphinxcontrib-spelling (>=4.0.1)"] nox = ["nox"] -pep8test = ["black", "check-sdist", "mypy", "ruff"] +pep8test = ["check-sdist", "click", "mypy", "ruff"] sdist = ["build"] ssh = ["bcrypt (>=3.1.5)"] -test = ["pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test = ["certifi", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] test-randomorder = ["pytest-randomly"] [[package]] @@ -706,7 +714,6 @@ files = [ name = "diffsync" version = "1.10.0" description = "Library to easily sync/diff/update 2 different data sources" -category = "main" optional = false python-versions = ">=3.8,<4.0" files = [ @@ -1140,7 +1147,6 @@ dev = ["coverage", "coveralls", "pytest"] name = "exceptiongroup" version = "1.2.0" description = "Backport of PEP 654 (exception groups)" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1335,7 +1341,6 @@ colorama = ">=0.4" name = "h11" version = "0.14.0" description = "A 
pure-Python, bring-your-own-I/O implementation of HTTP/1.1" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1347,7 +1352,6 @@ files = [ name = "httpcore" version = "0.17.3" description = "A minimal low-level HTTP client." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1359,17 +1363,16 @@ files = [ anyio = ">=3.0,<5.0" certifi = "*" h11 = ">=0.13,<0.15" -sniffio = ">=1.0.0,<2.0.0" +sniffio = "==1.*" [package.extras] http2 = ["h2 (>=3,<5)"] -socks = ["socksio (>=1.0.0,<2.0.0)"] +socks = ["socksio (==1.*)"] [[package]] name = "httpx" version = "0.24.1" description = "The next generation HTTP client." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1385,9 +1388,9 @@ sniffio = "*" [package.extras] brotli = ["brotli", "brotlicffi"] -cli = ["click (>=8.0.0,<9.0.0)", "pygments (>=2.0.0,<3.0.0)", "rich (>=10,<14)"] +cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] http2 = ["h2 (>=3,<5)"] -socks = ["socksio (>=1.0.0,<2.0.0)"] +socks = ["socksio (==1.*)"] [[package]] name = "idna" @@ -2003,7 +2006,6 @@ mkdocstrings = ">=0.20" name = "mypy-extensions" version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -2045,13 +2047,13 @@ typing-extensions = ">=4.3.0" [[package]] name = "nautobot" -version = "2.1.1" +version = "2.1.2" description = "Source of truth and network automation platform." 
optional = false python-versions = ">=3.8,<3.12" files = [ - {file = "nautobot-2.1.1-py3-none-any.whl", hash = "sha256:0b1592274bdb89b767266ec6b6837e67c2b82e2d7bf02308d6e2f877cf839731"}, - {file = "nautobot-2.1.1.tar.gz", hash = "sha256:62df1aa1a972396973df002b51f10dac7c76feeb52387fb94c1d10e41a2aa3e5"}, + {file = "nautobot-2.1.2-py3-none-any.whl", hash = "sha256:13fffb9ff7bf6dbee0df492256bc37060bea4229d71461b0b7447839bc35873a"}, + {file = "nautobot-2.1.2.tar.gz", hash = "sha256:185c1a1556c77f6ed5f2c9ed82aeea1f2b385b0ea2ceb480c78a6dbec8ef07d1"}, ] [package.dependencies] @@ -2078,16 +2080,17 @@ djangorestframework = ">=3.14.0,<3.15.0" drf-react-template-framework = ">=0.0.17,<0.0.18" drf-spectacular = {version = "0.26.3", extras = ["sidecar"]} emoji = ">=2.8.0,<2.9.0" -GitPython = ">=3.1.36,<3.2.0" +GitPython = ">=3.1.41,<3.2.0" graphene-django = ">=2.16.0,<2.17.0" graphene-django-optimizer = ">=0.8.0,<0.9.0" -Jinja2 = ">=3.1.2,<3.2.0" +Jinja2 = ">=3.1.3,<3.2.0" jsonschema = ">=4.7.0,<4.19.0" Markdown = ">=3.3.7,<3.4.0" MarkupSafe = ">=2.1.3,<2.2.0" netaddr = ">=0.8.0,<0.9.0" netutils = ">=1.6.0,<2.0.0" -packaging = ">=23.1,<23.2" +nh3 = ">=0.2.15,<0.3.0" +packaging = ">=23.1" Pillow = ">=10.0.0,<10.1.0" prometheus-client = ">=0.17.1,<0.18.0" psycopg2-binary = ">=2.9.9,<2.10.0" @@ -2109,7 +2112,6 @@ sso = ["social-auth-core[openidconnect,saml] (>=4.4.2,<4.5.0)"] name = "nautobot-plugin-nornir" version = "2.0.0" description = "Nautobot Nornir plugin." 
-category = "main" optional = false python-versions = ">=3.8,<3.12" files = [ @@ -2128,7 +2130,6 @@ nautobot = ["nautobot (>=2.0.0,<3.0.0)"] name = "nautobot-ssot" version = "2.2.0" description = "Nautobot Single Source of Truth" -category = "main" optional = false python-versions = ">=3.8,<3.12" files = [ @@ -2215,11 +2216,35 @@ files = [ [package.extras] optionals = ["jsonschema (>=4.17.3,<5.0.0)", "napalm (>=4.0.0,<5.0.0)"] +[[package]] +name = "nh3" +version = "0.2.15" +description = "Python bindings to the ammonia HTML sanitization library." +optional = false +python-versions = "*" +files = [ + {file = "nh3-0.2.15-cp37-abi3-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:9c0d415f6b7f2338f93035bba5c0d8c1b464e538bfbb1d598acd47d7969284f0"}, + {file = "nh3-0.2.15-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:6f42f99f0cf6312e470b6c09e04da31f9abaadcd3eb591d7d1a88ea931dca7f3"}, + {file = "nh3-0.2.15-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac19c0d68cd42ecd7ead91a3a032fdfff23d29302dbb1311e641a130dfefba97"}, + {file = "nh3-0.2.15-cp37-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f0d77272ce6d34db6c87b4f894f037d55183d9518f948bba236fe81e2bb4e28"}, + {file = "nh3-0.2.15-cp37-abi3-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:8d595df02413aa38586c24811237e95937ef18304e108b7e92c890a06793e3bf"}, + {file = "nh3-0.2.15-cp37-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86e447a63ca0b16318deb62498db4f76fc60699ce0a1231262880b38b6cff911"}, + {file = "nh3-0.2.15-cp37-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3277481293b868b2715907310c7be0f1b9d10491d5adf9fce11756a97e97eddf"}, + {file = "nh3-0.2.15-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:60684857cfa8fdbb74daa867e5cad3f0c9789415aba660614fe16cd66cbb9ec7"}, + {file = "nh3-0.2.15-cp37-abi3-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:3b803a5875e7234907f7d64777dfde2b93db992376f3d6d7af7f3bc347deb305"}, + {file = "nh3-0.2.15-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:0d02d0ff79dfd8208ed25a39c12cbda092388fff7f1662466e27d97ad011b770"}, + {file = "nh3-0.2.15-cp37-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:f3b53ba93bb7725acab1e030bc2ecd012a817040fd7851b332f86e2f9bb98dc6"}, + {file = "nh3-0.2.15-cp37-abi3-musllinux_1_2_i686.whl", hash = "sha256:b1e97221cedaf15a54f5243f2c5894bb12ca951ae4ddfd02a9d4ea9df9e1a29d"}, + {file = "nh3-0.2.15-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a5167a6403d19c515217b6bcaaa9be420974a6ac30e0da9e84d4fc67a5d474c5"}, + {file = "nh3-0.2.15-cp37-abi3-win32.whl", hash = "sha256:427fecbb1031db085eaac9931362adf4a796428ef0163070c484b5a768e71601"}, + {file = "nh3-0.2.15-cp37-abi3-win_amd64.whl", hash = "sha256:bc2d086fb540d0fa52ce35afaded4ea526b8fc4d3339f783db55c95de40ef02e"}, + {file = "nh3-0.2.15.tar.gz", hash = "sha256:d1e30ff2d8d58fb2a14961f7aac1bbb1c51f9bdd7da727be35c63826060b0bf3"}, +] + [[package]] name = "nornir" version = "3.4.1" description = "Pluggable multi-threaded framework with inventory management to help operate collections of devices" -category = "main" optional = false python-versions = ">=3.8,<4.0" files = [ @@ -2236,7 +2261,6 @@ mypy_extensions = ">=1.0.0,<2.0.0" name = "nornir-jinja2" version = "0.2.0" description = "Jinja2 plugins for nornir" -category = "main" optional = false python-versions = ">=3.6,<4.0" files = [ @@ -2252,7 +2276,6 @@ nornir = ">=3,<4" name = "nornir-napalm" version = "0.4.0" description = "NAPALM's plugins for nornir" -category = "main" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -2268,7 +2291,6 @@ nornir = ">=3,<4" name = "nornir-nautobot" version = "3.1.0" description = "Nornir Nautobot" -category = "main" optional = false python-versions = ">=3.8,<4.0" files = [ @@ -2294,7 +2316,6 @@ mikrotik-driver = ["routeros-api (>=0.17.0,<0.18.0)"] name = "nornir-netmiko" version = "1.0.1" description 
= "Netmiko's plugins for Nornir" -category = "main" optional = false python-versions = ">=3.8,<4.0" files = [ @@ -2309,7 +2330,6 @@ netmiko = ">=4.0.0,<5.0.0" name = "nornir-utils" version = "0.2.0" description = "Collection of plugins and functions for nornir that don't require external dependencies" -category = "main" optional = false python-versions = ">=3.6.2,<4.0.0" files = [ @@ -2325,7 +2345,6 @@ nornir = ">=3,<4" name = "ntc-templates" version = "4.1.0" description = "TextFSM Templates for Network Devices, and Python wrapper for TextFSM's CliTable." -category = "main" optional = false python-versions = ">=3.8,<4.0" files = [ @@ -2354,13 +2373,13 @@ signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] [[package]] name = "packaging" -version = "23.1" +version = "23.2" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.1-py3-none-any.whl", hash = "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61"}, - {file = "packaging-23.1.tar.gz", hash = "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f"}, + {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, + {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, ] [[package]] @@ -2650,7 +2669,6 @@ files = [ name = "pydantic" version = "1.10.14" description = "Data validation and settings management using python type hints" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2900,17 +2918,17 @@ tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"] [[package]] name = "pynautobot" -version = "2.0.1" +version = "2.0.2" description = "Nautobot API client library" -category = "main" optional = false python-versions = ">=3.8,<4.0" files = [ - {file = "pynautobot-2.0.1-py3-none-any.whl", hash = 
"sha256:14f9f05ef4c9f8918a56e4892c3badd3c25679aaf5cc6292adcebd7e1ba419c7"}, - {file = "pynautobot-2.0.1.tar.gz", hash = "sha256:de8bf725570baa5bee3a47e2a0de01605ab97e852e5f534b3d8e54a4ed6e2043"}, + {file = "pynautobot-2.0.2-py3-none-any.whl", hash = "sha256:c0533bcd5ab548d23273f6be49071f09a3dec7cd65ded3507be1707d25bb5f0e"}, + {file = "pynautobot-2.0.2.tar.gz", hash = "sha256:a62f7b35d4f3492a3cfb038abfc3272567dd1d4b88703ab2736db47f40263932"}, ] [package.dependencies] +packaging = ">=23.2,<24.0" requests = ">=2.30.0,<3.0.0" urllib3 = ">=1.21.1,<1.27" @@ -2976,13 +2994,13 @@ six = ">=1.5" [[package]] name = "python-slugify" -version = "8.0.1" +version = "8.0.2" description = "A Python slugify application that also handles Unicode" optional = false python-versions = ">=3.7" files = [ - {file = "python-slugify-8.0.1.tar.gz", hash = "sha256:ce0d46ddb668b3be82f4ed5e503dbc33dd815d83e2eb6824211310d3fb172a27"}, - {file = "python_slugify-8.0.1-py2.py3-none-any.whl", hash = "sha256:70ca6ea68fe63ecc8fa4fcf00ae651fc8a5d02d93dcd12ae6d4fc7ca46c4d395"}, + {file = "python-slugify-8.0.2.tar.gz", hash = "sha256:a1a02b127a95c124fd84f8f88be730e557fd823774bf19b1cd5e8704e2ae0e5e"}, + {file = "python_slugify-8.0.2-py2.py3-none-any.whl", hash = "sha256:428ea9b00c977b8f6c097724398f190b2c18e2a6011094d1001285875ccacdbf"}, ] [package.dependencies] @@ -3450,7 +3468,6 @@ files = [ name = "ruamel-yaml" version = "0.18.5" description = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3469,7 +3486,6 @@ jinja2 = ["ruamel.yaml.jinja2 (>=0.2)"] name = "ruamel-yaml-clib" version = "0.2.8" description = "C version of reader, parser and emitter for ruamel.yaml derived from libyaml" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -3606,7 +3622,6 @@ files = [ name = "sniffio" version = "1.3.0" description = "Sniff out which async 
library your code is running under" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3642,13 +3657,13 @@ social-auth-core = ">=4.4.1" [[package]] name = "social-auth-core" -version = "4.5.1" +version = "4.5.2" description = "Python social authentication made simple." optional = false python-versions = ">=3.8" files = [ - {file = "social-auth-core-4.5.1.tar.gz", hash = "sha256:307a4ba64d4f3ec86e4389163eac1d8b8656ffe5ab2e964aeff043ab00b3a662"}, - {file = "social_auth_core-4.5.1-py3-none-any.whl", hash = "sha256:54d0c598bf6ea0ec12bbcf78bee035c7cd604b5d781d80b7997e9e033c3ac05d"}, + {file = "social-auth-core-4.5.2.tar.gz", hash = "sha256:e313bfd09ad78a4af44c5630f3770776b24f468e9a5b71160ade9583efa43f8a"}, + {file = "social_auth_core-4.5.2-py3-none-any.whl", hash = "sha256:47b48be9b6da59aed4792d805cc25f4c7b7f57e0bbf86d659b5df0ff3f253109"}, ] [package.dependencies] @@ -3700,7 +3715,6 @@ pbr = ">=2.0.0,<2.1.0 || >2.1.0" name = "structlog" version = "22.3.0" description = "Structured Logging for Python" -category = "main" optional = false python-versions = ">=3.7" files = [ From 00f6d5aa9a1a9b8746225ae4ed5cc7f8b8b17792 Mon Sep 17 00:00:00 2001 From: Susan Hooks Date: Sun, 28 Jan 2024 11:47:28 -0700 Subject: [PATCH 019/225] updated formatting --- .../diffsync/adapters/onboarding_adapters.py | 15 ++++++----- nautobot_device_onboarding/jobs.py | 24 ----------------- .../nornir_plays/processor.py | 27 ++++++++++--------- .../{nornir_plays => utils}/formatter.py | 24 +++++++++-------- 4 files changed, 35 insertions(+), 55 deletions(-) rename nautobot_device_onboarding/{nornir_plays => utils}/formatter.py (83%) diff --git a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py index c7d82145..2cabc1da 100644 --- a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py @@ -73,7 +73,7 @@ 
def load_platforms(self): name=platform.name, network_driver=platform.network_driver, manufacturer__name=platform.manufacturer.name, - ) + ) # type: ignore self.add(onboarding_platform) if self.job.debug: self.job.logger.debug(f"Platform: {platform.name} loaded.") @@ -87,7 +87,7 @@ def load_device_types(self): model=device_type.model, part_number=device_type.model, manufacturer__name=device_type.manufacturer.name, - ) + ) # type: ignore self.add(onboarding_device_type) if self.job.debug: self.job.logger.debug(f"DeviceType: {device_type.model} loaded.") @@ -118,7 +118,7 @@ def load_devices(self): interfaces=interface_list, mask_length=device.primary_ip4.mask_length if device.primary_ip4 else None, serial=device.serial, - ) + ) # type: ignore self.add(onboarding_device) if self.job.debug: self.job.logger.debug(f"Device: {device.name} loaded.") @@ -188,6 +188,7 @@ def execute_command_getter(self): else: break self.device_data = result.result + self.job.logger.debug(f"Command Getter Job Result: {self.device_data}") def load_manufacturers(self): """Load manufacturer data into a DiffSync model.""" @@ -197,7 +198,7 @@ def load_manufacturers(self): onboarding_manufacturer = self.manufacturer( diffsync=self, name=self.device_data[ip_address]["manufacturer"], - ) + ) # type: ignore try: self.add(onboarding_manufacturer) except diffsync.ObjectAlreadyExists: @@ -213,7 +214,7 @@ def load_platforms(self): name=self.device_data[ip_address]["platform"], manufacturer__name=self.device_data[ip_address]["manufacturer"], network_driver=self.device_data[ip_address]["network_driver"], - ) + ) # type: ignore try: self.add(onboarding_platform) except diffsync.ObjectAlreadyExists: @@ -229,7 +230,7 @@ def load_device_types(self): model=self.device_data[ip_address]["device_type"], part_number=self.device_data[ip_address]["device_type"], manufacturer__name=self.device_data[ip_address]["manufacturer"], - ) + ) # type: ignore try: self.add(onboarding_device_type) except 
diffsync.ObjectAlreadyExists: @@ -258,7 +259,7 @@ def load_devices(self): interfaces=[self.device_data[ip_address]["mgmt_interface"]], mask_length=self.device_data[ip_address]["mask_length"], serial=self.device_data[ip_address]["serial"], - ) + ) # type: ignore try: self.add(onboarding_device) if self.job.debug: diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py index 48031ba0..207e9ead 100644 --- a/nautobot_device_onboarding/jobs.py +++ b/nautobot_device_onboarding/jobs.py @@ -356,18 +356,6 @@ class Meta: # pylint: disable=too-few-public-methods description = "Login to a device(s) and run commands." has_sensitive_variables = False hidden = False - - # def __init__(self, *args, **kwargs): - # """Initialize Command Getter Job.""" - # self.username = None - # self.password = None - # self.secret = None - # self.secrets_group = None - # self.ip4address = None - # self.platform = None - # self.port = None - # self.timeout = None - # super().__init__(*args, **kwargs) debug = BooleanVar( default=False, @@ -430,20 +418,8 @@ class Meta: # pylint: disable=too-few-public-methods ) def run(self, *args, **kwargs): - # mock_job_data = { - # "ip4address": "174.51.52.76", - # "platform": "cisco_nxos", - # "secrets_group": SecretsGroup.objects.get(name="NW_CREDS"), - # "port": 8022, - # "timeout": 30, - # } """Process onboarding task from ssot-ni job.""" - # self.ip4address = mock_job_data["ip4address"] - # self.secrets_group = mock_job_data["secrets_group"] - # self.platform = mock_job_data["platform"] - # self.port = mock_job_data["port"] - # self.timeout = mock_job_data["timeout"] self.ip_addresses = kwargs["ip_addresses"].replace(" ", "").split(",") self.port = kwargs["port"] diff --git a/nautobot_device_onboarding/nornir_plays/processor.py b/nautobot_device_onboarding/nornir_plays/processor.py index 96e620ba..1a7e1d0e 100755 --- a/nautobot_device_onboarding/nornir_plays/processor.py +++ b/nautobot_device_onboarding/nornir_plays/processor.py 
@@ -5,7 +5,8 @@ from nornir.core.task import AggregatedResult, MultiResult, Task from nornir_nautobot.exceptions import NornirNautobotException from nornir_nautobot.plugins.processors import BaseLoggingProcessor -from nautobot_device_onboarding.nornir_plays.formatter import format_ob_data_ios, format_ob_data_nxos +from nautobot_device_onboarding.utils.formatter import format_ob_data_ios, format_ob_data_nxos + class ProcessorDO(BaseLoggingProcessor): """Processor class for Device Onboarding jobs.""" @@ -43,16 +44,14 @@ def task_instance_completed(self, task: Task, host: Host, result: MultiResult) - if result.failed: for level_1_result in result: if hasattr(level_1_result, "exception") and hasattr(level_1_result.exception, "result"): - for level_2_result in level_1_result.exception.result: + for level_2_result in level_1_result.exception.result: # type: ignore if isinstance(level_2_result.exception, NornirNautobotException): return self.logger.critical(f"{task.name} failed: {result.exception}", extra={"object": task.host}) else: - self.logger.info( - f"Task Name: {task.name} Task Result: {result.result}", extra={"object": task.host} - ) - - self.data[host.name] = { + self.logger.info(f"Task Name: {task.name} Task Result: {result.result}", extra={"object": task.host}) + + self.data[task.name][host.name] = { "completed": True, "failed": result.failed, } @@ -61,26 +60,28 @@ def subtask_instance_completed(self, task: Task, host: Host, result: MultiResult """Processor for Logging on SubTask Completed.""" self.logger.info(f"Subtask completed {task.name}.", extra={"object": task.host}) self.logger.info(f"Subtask result {result.result}.", extra={"object": task.host}) + self.data[task.name][host.name] = { + "failed": result.failed, + "subtask_result": result.result, + } if host.name not in self.data: self.data[host.name] = { "platform": host.platform, "manufacturer": host.platform.split("_")[0].title() if host.platform else "PLACEHOLDER", } - if host.platform == "cisco_ios": 
- formatted_data = format_ob_data_ios(host, result) - elif host.platform == "cisco_xe": + if host.platform in ["cisco_ios", "cisco_xe"]: formatted_data = format_ob_data_ios(host, result) elif host.platform == "cisco_nxos": formatted_data = format_ob_data_nxos(host, result) else: formatted_data = {} + self.logger.info(f"No formatter for {host.platform}.", extra={"object": task.host}) self.data[host.name].update(formatted_data) - - def subtask_instance_started(self, task: Task, host: Host) -> None: """Processor for Logging on SubTask Start.""" self.logger.info(f"Subtask starting {task.name}.", extra={"object": task.host}) - \ No newline at end of file + self.data[task.name] = {} + self.data[task.name][host.name] = {"started": True} diff --git a/nautobot_device_onboarding/nornir_plays/formatter.py b/nautobot_device_onboarding/utils/formatter.py similarity index 83% rename from nautobot_device_onboarding/nornir_plays/formatter.py rename to nautobot_device_onboarding/utils/formatter.py index 4a41ec20..9068cdc1 100644 --- a/nautobot_device_onboarding/nornir_plays/formatter.py +++ b/nautobot_device_onboarding/utils/formatter.py @@ -2,7 +2,7 @@ def format_ob_data_ios(host, result): """Format the data for onboarding IOS devices.""" primary_ip4 = host.name formatted_data = {} - + for r in result: if r.name == "show inventory": device_type = r.result[0].get("pid") @@ -10,7 +10,7 @@ def format_ob_data_ios(host, result): elif r.name == "show version": hostname = r.result[0].get("hostname") serial = r.result[0].get("serial") - formatted_data["hostname"] = hostname + formatted_data["hostname"] = hostname formatted_data["serial"] = serial[0] elif r.name == "show interfaces": show_interfaces = r.result @@ -19,20 +19,20 @@ def format_ob_data_ios(host, result): mask_length = interface.get("prefix_length") interface_name = interface.get("interface") formatted_data["mask_length"] = mask_length - formatted_data["interface_name"] = interface_name - + formatted_data["mgmt_interface"] = 
interface_name + return formatted_data -# TODO: Add NXOS formatter, others if necessary + def format_ob_data_nxos(host, result): """Format the data for onboarding NXOS devices.""" primary_ip4 = host.name formatted_data = {} - + for r in result: if r.name == "show inventory": # TODO: Add check for PID when textfsm template is fixed - pass + pass elif r.name == "show version": device_type = r.result[0].get("platform") formatted_data["device_type"] = device_type @@ -40,16 +40,18 @@ def format_ob_data_nxos(host, result): serial = r.result[0].get("serial") formatted_data["hostname"] = hostname if serial: - formatted_data["serial"] = serial[0] + formatted_data["serial"] = serial else: formatted_data["serial"] = "" - elif r.name == "show interfaces": + elif r.name == "show interface": show_interfaces = r.result + print(f"show interfaces {show_interfaces}") for interface in show_interfaces: if interface.get("ip_address") == primary_ip4: mask_length = interface.get("prefix_length") interface_name = interface.get("interface") formatted_data["mask_length"] = mask_length - formatted_data["interface_name"] = interface_name - + formatted_data["mgmt_interface"] = interface_name + break + return formatted_data From 62dca43ce668c214cf04a5d6dd94906594fb4a92 Mon Sep 17 00:00:00 2001 From: Susan Hooks Date: Mon, 29 Jan 2024 09:34:46 -0700 Subject: [PATCH 020/225] updated formatting --- .../diffsync/adapters/onboarding_adapters.py | 8 +++-- nautobot_device_onboarding/jobs.py | 18 ++++++++---- .../nornir_plays/processor.py | 29 +++++++++++++------ .../utils/inventory_creator.py | 1 + 4 files changed, 39 insertions(+), 17 deletions(-) diff --git a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py index 2cabc1da..0e7ce30d 100644 --- a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py @@ -71,8 +71,8 @@ def 
load_platforms(self): onboarding_platform = self.platform( diffsync=self, name=platform.name, - network_driver=platform.network_driver, - manufacturer__name=platform.manufacturer.name, + network_driver=platform.network_driver if platform.network_driver else "", + manufacturer__name=platform.manufacturer.name if platform.manufacturer else None, ) # type: ignore self.add(onboarding_platform) if self.job.debug: @@ -188,7 +188,9 @@ def execute_command_getter(self): else: break self.device_data = result.result - self.job.logger.debug(f"Command Getter Job Result: {self.device_data}") + if self.job.debug: + self.job.logger.debug(f"Command Getter Job Result: {self.device_data}") + def load_manufacturers(self): """Load manufacturer data into a DiffSync model.""" diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py index dd3f34c2..520c7aa0 100644 --- a/nautobot_device_onboarding/jobs.py +++ b/nautobot_device_onboarding/jobs.py @@ -481,7 +481,15 @@ class Meta: # pylint: disable=too-few-public-methods required=False, description="Device platform. 
Define ONLY to override auto-recognition of platform.", ) - + def process_command_getter_result(self, command_result, ip_addresses): + """Process the data returned from CommandGetterDO""" + processed_device_data = {} + for ip_address in ip_addresses: + processed_device_data[ip_address] = command_result[ip_address] + if self.debug: + self.logger.debug(f"Processed CommandGetterDO return for {ip_address}: {command_result[ip_address]}") + return processed_device_data + def run(self, *args, **kwargs): """Process onboarding task from ssot-ni job.""" @@ -505,18 +513,18 @@ def run(self, *args, **kwargs): ip_address = self.ip_addresses inventory_constructed = _set_inventory(ip_address, self.platform, self.port, self.secrets_group) nr_with_processors.inventory.hosts.update(inventory_constructed) + nr_with_processors.run(task=netmiko_send_commands) + final_result = self.process_command_getter_result(compiled_results, self.ip_addresses) #### Remove before final merge #### for host, data in nr_with_processors.inventory.hosts.items(): self.logger.info("%s;\n%s", host, data.dict()) #### End #### - - nr_with_processors.run(task=netmiko_send_commands) - + except Exception as err: self.logger.info("Error: %s", err) return err - return compiled_results + return final_result jobs = [OnboardingTask, SSOTDeviceOnboarding, SSOTNetworkImporter, CommandGetterDO] diff --git a/nautobot_device_onboarding/nornir_plays/processor.py b/nautobot_device_onboarding/nornir_plays/processor.py index 1a7e1d0e..0ac71ebe 100755 --- a/nautobot_device_onboarding/nornir_plays/processor.py +++ b/nautobot_device_onboarding/nornir_plays/processor.py @@ -18,15 +18,17 @@ def __init__(self, logger, command_outputs): def task_started(self, task: Task) -> None: self.data[task.name] = {} - self.data[task.name]["started"] = True + #self.data[task.name]["started"] = True + self.logger.info(f"Task Name: {task.name} started") def task_completed(self, task: Task, result: AggregatedResult) -> None: - 
self.data[task.name]["completed"] = True + #self.data[task.name]["completed"] = True + self.logger.info(f"Task Name: {task.name} completed") def task_instance_started(self, task: Task, host: Host) -> None: """Processor for Logging on Task Start.""" self.logger.info(f"Starting {task.name}.", extra={"object": task.host}) - self.data[task.name][host.name] = {"started": True} + self.data[task.name][host.name] = {} def task_instance_completed(self, task: Task, host: Host, result: MultiResult) -> None: """Nornir processor task completion for OS upgrades. @@ -51,23 +53,32 @@ def task_instance_completed(self, task: Task, host: Host, result: MultiResult) - else: self.logger.info(f"Task Name: {task.name} Task Result: {result.result}", extra={"object": task.host}) - self.data[task.name][host.name] = { - "completed": True, - "failed": result.failed, - } + # self.data[task.name][host.name] = { + # "completed": True, + # "failed": result.failed, + # } def subtask_instance_completed(self, task: Task, host: Host, result: MultiResult) -> None: """Processor for Logging on SubTask Completed.""" self.logger.info(f"Subtask completed {task.name}.", extra={"object": task.host}) self.logger.info(f"Subtask result {result.result}.", extra={"object": task.host}) + self.data[task.name][host.name] = { "failed": result.failed, "subtask_result": result.result, } - if host.name not in self.data: + self.logger.info(f" self.data: {self.data}") + + if self.data[task.name][host.name].get("failed"): + self.data[host.name] = { + "failed": True, + "subtask_result": result.result, + } + elif host.name not in self.data: self.data[host.name] = { "platform": host.platform, "manufacturer": host.platform.split("_")[0].title() if host.platform else "PLACEHOLDER", + "network_driver": host.platform, } if host.platform in ["cisco_ios", "cisco_xe"]: @@ -84,4 +95,4 @@ def subtask_instance_started(self, task: Task, host: Host) -> None: """Processor for Logging on SubTask Start.""" self.logger.info(f"Subtask starting 
{task.name}.", extra={"object": task.host}) self.data[task.name] = {} - self.data[task.name][host.name] = {"started": True} + #self.data[task.name][host.name] = {"started": True} diff --git a/nautobot_device_onboarding/utils/inventory_creator.py b/nautobot_device_onboarding/utils/inventory_creator.py index 8ee688fa..310b789b 100755 --- a/nautobot_device_onboarding/utils/inventory_creator.py +++ b/nautobot_device_onboarding/utils/inventory_creator.py @@ -56,6 +56,7 @@ def guess_netmiko_device_type(hostname, username, password, port): except Exception as err: print(err) + print(f"************************Guessed device type: {guessed_device_type}") return guessed_device_type From 28104c0f7ca5449b6c690bbacfc78067094a527e Mon Sep 17 00:00:00 2001 From: David Cates Date: Mon, 29 Jan 2024 10:28:26 -0700 Subject: [PATCH 021/225] update ssot integration --- .../diffsync/adapters/onboarding_adapters.py | 23 +++++++++++++++++-- 1 file changed, 21 insertions(+), 2 deletions(-) diff --git a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py index 0e7ce30d..76850664 100644 --- a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py @@ -164,6 +164,25 @@ def _validate_ip_addresses(self, ip_addresses): else: raise netaddr.AddrConversionError + def _handle_failed_connections(self, device_data): + """ + Handle result data from failed device connections. + + If a device fails to return expected data, log the result + and remove it from the data to be loaded into the diffsync store. + """ + failed_ip_addresses = [] + + for ip_address in device_data: + if device_data[ip_address].get("failed"): + self.job.logger.error(f"Failed to connect to {ip_address}. 
This device will not be onboarded.") + if self.job.debug: + self.job.logger.debug(device_data[ip_address].get("subtask_result")) + failed_ip_addresses.append(ip_address) + for ip_address in failed_ip_addresses: + del device_data[ip_address] + self.device_data = device_data + def execute_command_getter(self): if self.job.platform: if not self.job.platform.network_driver: @@ -187,9 +206,9 @@ def execute_command_getter(self): result.refresh_from_db() else: break - self.device_data = result.result if self.job.debug: - self.job.logger.debug(f"Command Getter Job Result: {self.device_data}") + self.job.logger.debug(f"Command Getter Job Result: {result.result}") + self._handle_failed_connections(device_data=result.result) def load_manufacturers(self): From 594010f1730e85c0b352cd462b5be5acc29c9e74 Mon Sep 17 00:00:00 2001 From: David Cates Date: Mon, 29 Jan 2024 15:36:12 -0700 Subject: [PATCH 022/225] update ssot integration --- .../adapters/network_importer_adapters.py | 35 +++- .../diffsync/adapters/onboarding_adapters.py | 5 +- .../models/network_importer_models.py | 8 +- nautobot_device_onboarding/jobs.py | 198 ++++++++---------- 4 files changed, 128 insertions(+), 118 deletions(-) diff --git a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py index d99fe7e5..1d60a93e 100644 --- a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py @@ -5,6 +5,36 @@ import diffsync +####################################### +# FOR TESTING ONLY - TO BE REMOVED # +####################################### +mock_data = { + "demo-cisco-xe1": { + "serial": "9ABUXU581111", + "interfaces": { + "GigabitEthernet1": { + "mgmt_only": True, + "ip_addresses": ["10.1.1.8"], + }, + "GigabitEthernet2": { + "mgmt_only": False, + "ip_addresses": ["10.1.1.9"], + }, + "GigabitEthernet3": { + "mgmt_only": False, + 
"ip_addresses": ["10.1.1.10, 10.1.1.11"], + }, + "GigabitEthernet4": { + "mgmt_only": False, + "ip_addresses": [], + }, + } + }, +} +####################################### +###################################### + + class FilteredNautobotAdapter(NautobotAdapter): """ Allow for filtering of data loaded from Nautobot into DiffSync models. @@ -29,7 +59,7 @@ class NetworkImporterNautobotAdapter(FilteredNautobotAdapter): interface = network_importer_models.NetworkImporterInterface ip_address = network_importer_models.NetworkImporterIPAddress - top_level = ["ip_address", "interface", "device"] + top_level = ["device"] class NetworkImporterNetworkAdapter(diffsync.DiffSync): @@ -45,7 +75,8 @@ def __init__(self, *args, job, sync=None, **kwargs): interface = network_importer_models.NetworkImporterInterface ip_address = network_importer_models.NetworkImporterIPAddress - top_level = ["ip_address", "interface", "device"] + top_level = ["device"] + device_data = mock_data def load_devices(self): pass diff --git a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py index 76850664..5a95b754 100644 --- a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py @@ -193,12 +193,13 @@ def execute_command_getter(self): raise Exception("Platform.network_driver missing") command_getter_job = Job.objects.get(name="Command Getter for Device Onboarding") + job_kwargs = self.job.prepare_job_kwargs(self.job.job_result.task_kwargs) + kwargs = self.job.serialize_data(job_kwargs) result = JobResult.enqueue_job( job_model=command_getter_job, user=self.job.user, celery_kwargs=self.job.job_result.celery_kwargs, - *self.job.job_result.task_args, - **self.job.job_result.task_kwargs + **kwargs ) while True: if result.status not in JobResultStatusChoices.READY_STATES: diff --git 
a/nautobot_device_onboarding/diffsync/models/network_importer_models.py b/nautobot_device_onboarding/diffsync/models/network_importer_models.py index 3f33eef7..1249b9ad 100644 --- a/nautobot_device_onboarding/diffsync/models/network_importer_models.py +++ b/nautobot_device_onboarding/diffsync/models/network_importer_models.py @@ -40,11 +40,14 @@ class NetworkImporterDevice(FilteredNautobotModel): "name", "serial", ) + _children = {"interface": "interfaces"} name: str location__name: str serial: str + interfaces: List["NetworkImporterInterface"] = [] + @classmethod def _get_queryset(cls, diffsync: "DiffSync"): """Get the queryset used to load the models data from Nautobot.""" @@ -74,10 +77,13 @@ class NetworkImporterInterface(FilteredNautobotModel): "device__name", "name", ) - + _children = {"ip_address": "ip_addresses"} device__name: str name: str + ip_addresses: List["NetworkImporterIPAddress"] = [] + + class NetworkImporterIPAddress(FilteredNautobotModel): _modelname = "ip_address" _model = IPAddress diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py index 520c7aa0..a7746191 100644 --- a/nautobot_device_onboarding/jobs.py +++ b/nautobot_device_onboarding/jobs.py @@ -294,41 +294,62 @@ def load_target_adapter(self): self.target_adapter = OnboardingNautobotAdapter(job=self, sync=self.sync) self.target_adapter.load() - def run(self, dryrun, memory_profiling, *args, **kwargs): # pylint:disable=arguments-differ + def run( + self, + dryrun, + memory_profiling, + debug, + location, + namespace, + ip_addresses, + management_only_interface, + update_devices_without_primary_ip, + device_role, + device_status, + interface_status, + ip_address_status, + port, + timeout, + secrets_group, + platform, + *args, + **kwargs + ): # pylint:disable=arguments-differ """Run sync.""" self.dryrun = dryrun self.memory_profiling = memory_profiling - self.debug = kwargs["debug"] - self.location = kwargs["location"] - self.namespace = kwargs["namespace"] - 
self.ip_addresses = kwargs["ip_addresses"].replace(" ", "").split(",") - self.management_only_interface = kwargs["management_only_interface"] - self.update_devices_without_primary_ip = kwargs["update_devices_without_primary_ip"] - self.device_role = kwargs["device_role"] - self.device_status = kwargs["device_status"] - self.interface_status = kwargs["interface_status"] - self.ip_address_status = kwargs["ip_address_status"] - self.port = kwargs["port"] - self.timeout = kwargs["timeout"] - self.secrets_group = kwargs["secrets_group"] - self.platform = kwargs["platform"] - - nautobot_object_models = [ - "location", - "namespace", - "device_role", - "device_status", - "interface_status", - "ip_address_status", - "secrets_group", - "platform", - ] - # Convert model instances into IDs, necessary for sending form inputs to the worker for use in other jobs - for model in nautobot_object_models: - kwargs[model] = kwargs[model].id if kwargs[model] else None - - self.job_result.task_kwargs = kwargs + self.debug = debug + self.location = location + self.namespace = namespace + self.ip_addresses = ip_addresses.replace(" ", "").split(",") + self.management_only_interface = management_only_interface + self.update_devices_without_primary_ip = update_devices_without_primary_ip + self.device_role = device_role + self.device_status = device_status + self.interface_status = interface_status + self.ip_address_status = ip_address_status + self.port = port + self.timeout = timeout + self.secrets_group = secrets_group + self.platform = platform + + self.job_result.task_kwargs = { + "debug": debug, + "location": location, + "namespace": namespace, + "ip_addresses": ip_addresses, + "management_only_interface": management_only_interface, + "update_devices_without_primary_ip": update_devices_without_primary_ip, + "update_devices_without_primary_ip": device_role, + "device_status": device_status, + "interface_status": interface_status, + "ip_address_status": ip_address_status, + "port": port, 
+ "timeout": timeout, + "secrets_group": secrets_group, + "platform": platform, + } super().run(dryrun, memory_profiling, *args, **kwargs) class SSOTNetworkImporter(DataSource): @@ -349,7 +370,6 @@ class Meta: ) debug = BooleanVar(description="Enable for more verbose logging.") - devices = MultiObjectVar( model=Device, required=False, @@ -384,30 +404,35 @@ def load_target_adapter(self): self.target_adapter = NetworkImporterNautobotAdapter(job=self, sync=self.sync) self.target_adapter.load() - def run(self, dryrun, memory_profiling, *args, **kwargs): # pylint:disable=arguments-differ + def run( + self, + dryrun, + memory_profiling, + debug, + location, + devices, + device_role, + tag, + *args, + **kwargs): # pylint:disable=arguments-differ """Run sync.""" self.dryrun = dryrun self.memory_profiling = memory_profiling - self.debug = kwargs["debug"] - self.location = kwargs["location"] - self.devices = kwargs["devices"] - self.device_role = kwargs["device_role"] - self.tag = kwargs["tag"] - - nautobot_object_models = [ - "location", - "devices", - "device_role", - "device_role", - "tag", - ] - # Convert model instances into IDs, necessary for sending form inputs to the worker for use in other jobs - # TODO: MultiObjectVars need to be converted to ids for transver to the command getter - # for model in nautobot_object_models: - # kwargs[model] = kwargs[model].id if kwargs[model] else None - - self.job_result.task_kwargs = kwargs + self.debug = debug + self.location = location + self.devices = devices + self.device_role = device_role + self.tag = tag + + self.job_result.task_kwargs = { + "debug": debug, + "location": location, + "devices": devices, + "device_role": device_role, + "tag": tag, + } + super().run(dryrun, memory_profiling, *args, **kwargs) @@ -421,66 +446,14 @@ class Meta: # pylint: disable=too-few-public-methods description = "Login to a device(s) and run commands." 
has_sensitive_variables = False hidden = False - - debug = BooleanVar( - default=False, - description="Enable for more verbose logging.", - ) - location = ObjectVar( - model=Location, - query_params={"content_type": "dcim.device"}, - description="Assigned Location for the onboarded device(s)", - ) - namespace = ObjectVar(model=Namespace, description="Namespace ip addresses belong to.") - ip_addresses = StringVar( - description="IP Address of the device to onboard, specify in a comma separated list for multiple devices.", - label="IPv4 Addresses", - ) - management_only_interface = BooleanVar( - default=False, - label="Set Management Only", - description="If True, interfaces that are created or updated will be set to management only. If False, the interface will be set to not be management only.", - ) - update_devices_without_primary_ip = BooleanVar( - default=False, - description="If a device at the specified location already exists in Nautobot but the primary ip address " - "does not match an ip address entered, update this device with the sync." 
- ) - device_role = ObjectVar( - model=Role, - query_params={"content_types": "dcim.device"}, - required=True, - description="Role to be applied to all new onboarded devices", - ) - device_status = ObjectVar( - model=Status, - query_params={"content_types": "dcim.device"}, - required=True, - description="Status to be applied to all new onboarded devices", - ) - interface_status = ObjectVar( - model=Status, - query_params={"content_types": "dcim.interface"}, - required=True, - description="Status to be applied to all new onboarded device interfaces", - ) - ip_address_status = ObjectVar( - label="IP address status", - model=Status, - query_params={"content_types": "ipam.ipaddress"}, - required=True, - description="Status to be applied to all new onboarded IP addresses.", - ) - port = IntegerVar(default=22) - timeout = IntegerVar(default=30) - secrets_group = ObjectVar( - model=SecretsGroup, required=True, description="SecretsGroup for device connection credentials." - ) - platform = ObjectVar( - model=Platform, - required=False, - description="Device platform. 
Define ONLY to override auto-recognition of platform.", - ) + + debug = BooleanVar() + ip_addresses = StringVar() + port = IntegerVar() + timeout = IntegerVar() + secrets_group = ObjectVar(model=SecretsGroup) + platform = ObjectVar(model=Platform) + def process_command_getter_result(self, command_result, ip_addresses): """Process the data returned from CommandGetterDO""" processed_device_data = {} @@ -491,7 +464,6 @@ def process_command_getter_result(self, command_result, ip_addresses): return processed_device_data def run(self, *args, **kwargs): - """Process onboarding task from ssot-ni job.""" self.ip_addresses = kwargs["ip_addresses"].replace(" ", "").split(",") From 62c8db8a6c4643bbec4ed7feac43ab1602c8ac93 Mon Sep 17 00:00:00 2001 From: Susan Hooks Date: Mon, 29 Jan 2024 16:01:08 -0700 Subject: [PATCH 023/225] updated args --- nautobot_device_onboarding/utils/inventory_creator.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/nautobot_device_onboarding/utils/inventory_creator.py b/nautobot_device_onboarding/utils/inventory_creator.py index 310b789b..1c2a5f35 100755 --- a/nautobot_device_onboarding/utils/inventory_creator.py +++ b/nautobot_device_onboarding/utils/inventory_creator.py @@ -39,14 +39,13 @@ def guess_netmiko_device_type(hostname, username, password, port): """Guess the device type of host, based on Netmiko.""" guessed_device_type = None - netmiko_optional_args = {} + netmiko_optional_args = {"port": port} remote_device = { "device_type": "autodetect", "host": hostname, "username": username, "password": password, - "port": port **netmiko_optional_args, } From 32c7a96b0d63ba49163e7b9343dfab628e6fb49e Mon Sep 17 00:00:00 2001 From: David Cates Date: Mon, 29 Jan 2024 16:42:40 -0700 Subject: [PATCH 024/225] black, isort, pydocstyle --- development/nautobot_config.py | 1 + nautobot_device_onboarding/__init__.py | 1 + .../adapters/network_importer_adapters.py | 18 ++-- .../diffsync/adapters/onboarding_adapters.py | 39 ++++--- 
.../models/network_importer_models.py | 25 +++-- .../diffsync/models/onboarding_models.py | 15 ++- nautobot_device_onboarding/jobs.py | 102 +++++++++--------- nautobot_device_onboarding/nautobot_keeper.py | 11 +- nautobot_device_onboarding/netdev_keeper.py | 11 +- .../nornir_plays/command_getter.py | 6 +- .../nornir_plays/empty_inventory.py | 1 + .../nornir_plays/logger.py | 3 +- .../nornir_plays/processor.py | 14 ++- nautobot_device_onboarding/utils/formatter.py | 3 + .../utils/inventory_creator.py | 6 +- 15 files changed, 137 insertions(+), 119 deletions(-) diff --git a/development/nautobot_config.py b/development/nautobot_config.py index aca04327..f255413d 100644 --- a/development/nautobot_config.py +++ b/development/nautobot_config.py @@ -1,4 +1,5 @@ """Nautobot development configuration file.""" + import os import sys diff --git a/nautobot_device_onboarding/__init__.py b/nautobot_device_onboarding/__init__.py index 3647ea33..37411845 100644 --- a/nautobot_device_onboarding/__init__.py +++ b/nautobot_device_onboarding/__init__.py @@ -1,4 +1,5 @@ """App declaration for nautobot_device_onboarding.""" + # Metadata is inherited from Nautobot. 
If not including Nautobot in the environment, this should be added from importlib import metadata diff --git a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py index 1d60a93e..3e7a7f64 100644 --- a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py @@ -1,9 +1,9 @@ """DiffSync adapters.""" -from nautobot_ssot.contrib import NautobotAdapter -from nautobot_device_onboarding.diffsync.models import network_importer_models import diffsync +from nautobot_ssot.contrib import NautobotAdapter +from nautobot_device_onboarding.diffsync.models import network_importer_models ####################################### # FOR TESTING ONLY - TO BE REMOVED # @@ -28,7 +28,7 @@ "mgmt_only": False, "ip_addresses": [], }, - } + }, }, } ####################################### @@ -41,14 +41,16 @@ class FilteredNautobotAdapter(NautobotAdapter): Must be used with FilteredNautobotModel. 
""" - + def _load_objects(self, diffsync_model): """Given a diffsync model class, load a list of models from the database and return them.""" parameter_names = self._get_parameter_names(diffsync_model) for database_object in diffsync_model._get_queryset(diffsync=self): - self.job.logger.debug(f"LOADING: Database Object: {database_object}, " - f"Model Name: {diffsync_model._modelname}, " - f"Parameter Names: {parameter_names}") + self.job.logger.debug( + f"LOADING: Database Object: {database_object}, " + f"Model Name: {diffsync_model._modelname}, " + f"Parameter Names: {parameter_names}" + ) self._load_single_object(database_object, diffsync_model, parameter_names) @@ -79,7 +81,9 @@ def __init__(self, *args, job, sync=None, **kwargs): device_data = mock_data def load_devices(self): + """Load device data from network devices.""" pass def load(self): + """Load network data.""" self.load_devices() diff --git a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py index 5a95b754..1d09e817 100644 --- a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py @@ -2,13 +2,13 @@ import time +import diffsync import netaddr -from nautobot.extras.models import JobResult, Job -from nautobot.dcim.models import Device, DeviceType, Manufacturer, Platform -from nautobot_device_onboarding.diffsync.models import onboarding_models from nautobot.apps.choices import JobResultStatusChoices +from nautobot.dcim.models import Device, DeviceType, Manufacturer, Platform +from nautobot.extras.models import Job, JobResult -import diffsync +from nautobot_device_onboarding.diffsync.models import onboarding_models ####################################### # FOR TESTING ONLY - TO BE REMOVED # @@ -56,6 +56,7 @@ def __init__(self, job, sync, *args, **kwargs): self.sync = sync def load_manufacturers(self): + """Load manufacturer data 
from Nautobot.""" for manufacturer in Manufacturer.objects.all(): if self.job.debug: self.job.logger.debug(f"Loading Manufacturer data from Nautobot...") @@ -65,6 +66,7 @@ def load_manufacturers(self): self.job.logger.debug(f"Manufacturer: {manufacturer.name} loaded.") def load_platforms(self): + """Load platform data from Nautobot.""" if self.job.debug: self.job.logger.debug(f"Loading Platform data from Nautobot...") for platform in Platform.objects.all(): @@ -73,12 +75,13 @@ def load_platforms(self): name=platform.name, network_driver=platform.network_driver if platform.network_driver else "", manufacturer__name=platform.manufacturer.name if platform.manufacturer else None, - ) # type: ignore + ) # type: ignore self.add(onboarding_platform) if self.job.debug: self.job.logger.debug(f"Platform: {platform.name} loaded.") def load_device_types(self): + """Load device type data from Nautobot.""" if self.job.debug: self.job.logger.debug(f"Loading DeviceType data from Nautobot...") for device_type in DeviceType.objects.all(): @@ -87,12 +90,13 @@ def load_device_types(self): model=device_type.model, part_number=device_type.model, manufacturer__name=device_type.manufacturer.name, - ) # type: ignore + ) # type: ignore self.add(onboarding_device_type) if self.job.debug: self.job.logger.debug(f"DeviceType: {device_type.model} loaded.") def load_devices(self): + """Load device data from Nautobot.""" if self.job.debug: self.job.logger.debug(f"Loading Device data from Nautobot...") @@ -118,7 +122,7 @@ def load_devices(self): interfaces=interface_list, mask_length=device.primary_ip4.mask_length if device.primary_ip4 else None, serial=device.serial, - ) # type: ignore + ) # type: ignore self.add(onboarding_device) if self.job.debug: self.job.logger.debug(f"Device: {device.name} loaded.") @@ -184,6 +188,7 @@ def _handle_failed_connections(self, device_data): self.device_data = device_data def execute_command_getter(self): + """Start the CommandGetterDO job to query devices for 
data.""" if self.job.platform: if not self.job.platform.network_driver: self.job.logger.error( @@ -191,15 +196,12 @@ def execute_command_getter(self): "does not have a network driver, please update the Platform." ) raise Exception("Platform.network_driver missing") - + command_getter_job = Job.objects.get(name="Command Getter for Device Onboarding") job_kwargs = self.job.prepare_job_kwargs(self.job.job_result.task_kwargs) kwargs = self.job.serialize_data(job_kwargs) result = JobResult.enqueue_job( - job_model=command_getter_job, - user=self.job.user, - celery_kwargs=self.job.job_result.celery_kwargs, - **kwargs + job_model=command_getter_job, user=self.job.user, celery_kwargs=self.job.job_result.celery_kwargs, **kwargs ) while True: if result.status not in JobResultStatusChoices.READY_STATES: @@ -210,7 +212,6 @@ def execute_command_getter(self): if self.job.debug: self.job.logger.debug(f"Command Getter Job Result: {result.result}") self._handle_failed_connections(device_data=result.result) - def load_manufacturers(self): """Load manufacturer data into a DiffSync model.""" @@ -220,7 +221,7 @@ def load_manufacturers(self): onboarding_manufacturer = self.manufacturer( diffsync=self, name=self.device_data[ip_address]["manufacturer"], - ) # type: ignore + ) # type: ignore try: self.add(onboarding_manufacturer) except diffsync.ObjectAlreadyExists: @@ -236,7 +237,7 @@ def load_platforms(self): name=self.device_data[ip_address]["platform"], manufacturer__name=self.device_data[ip_address]["manufacturer"], network_driver=self.device_data[ip_address]["network_driver"], - ) # type: ignore + ) # type: ignore try: self.add(onboarding_platform) except diffsync.ObjectAlreadyExists: @@ -252,7 +253,7 @@ def load_device_types(self): model=self.device_data[ip_address]["device_type"], part_number=self.device_data[ip_address]["device_type"], manufacturer__name=self.device_data[ip_address]["manufacturer"], - ) # type: ignore + ) # type: ignore try: self.add(onboarding_device_type) except 
diffsync.ObjectAlreadyExists: @@ -260,10 +261,6 @@ def load_device_types(self): def load_devices(self): """Load device data into a DiffSync model.""" - - # PROVIDE TO JOB: ip4address, port, timeout, secrets_group, platform (optional) - # TODO: CHECK FOR FAILED CONNECTIONS AND DO NOT LOAD DATA, LOG FAILED IPs - for ip_address in self.device_data: if self.job.debug: self.job.logger.debug(f"loading device data for {ip_address}") @@ -281,7 +278,7 @@ def load_devices(self): interfaces=[self.device_data[ip_address]["mgmt_interface"]], mask_length=self.device_data[ip_address]["mask_length"], serial=self.device_data[ip_address]["serial"], - ) # type: ignore + ) # type: ignore try: self.add(onboarding_device) if self.job.debug: diff --git a/nautobot_device_onboarding/diffsync/models/network_importer_models.py b/nautobot_device_onboarding/diffsync/models/network_importer_models.py index 1249b9ad..f30d0c7d 100644 --- a/nautobot_device_onboarding/diffsync/models/network_importer_models.py +++ b/nautobot_device_onboarding/diffsync/models/network_importer_models.py @@ -1,10 +1,11 @@ """Diffsync models.""" -from nautobot_ssot.contrib import NautobotModel -from nautobot.dcim.models import Device, Interface -from nautobot.ipam.models import IPAddress from typing import List, Optional + from diffsync import DiffSync +from nautobot.dcim.models import Device, Interface +from nautobot.ipam.models import IPAddress +from nautobot_ssot.contrib import NautobotModel class FilteredNautobotModel(NautobotModel): @@ -27,18 +28,20 @@ def _get_queryset(cls, diffsync: "DiffSync"): @classmethod def get_queryset(cls, diffsync: "DiffSync"): """Get the queryset used to load the models data from Nautobot.""" - # Replace return with a filtered queryset. + # Replace return with a filtered queryset. 
# Access the job form inputs with diffsync ex: diffsync.job.location.name return cls._model.objects.all() class NetworkImporterDevice(FilteredNautobotModel): + """Diffsync model for device data.""" + _modelname = "device" _model = Device _identifiers = ( - "location__name", - "name", - "serial", + "location__name", + "name", + "serial", ) _children = {"interface": "interfaces"} @@ -71,11 +74,13 @@ def _get_queryset(cls, diffsync: "DiffSync"): class NetworkImporterInterface(FilteredNautobotModel): + """Diffsync model for interface data.""" + _modelname = "interface" _model = Interface _identifiers = ( - "device__name", - "name", + "device__name", + "name", ) _children = {"ip_address": "ip_addresses"} device__name: str @@ -85,6 +90,8 @@ class NetworkImporterInterface(FilteredNautobotModel): class NetworkImporterIPAddress(FilteredNautobotModel): + """Diffsync model for ip address data.""" + _modelname = "ip_address" _model = IPAddress _identifiers = ( diff --git a/nautobot_device_onboarding/diffsync/models/onboarding_models.py b/nautobot_device_onboarding/diffsync/models/onboarding_models.py index 7e6c60dd..c94dd3ca 100644 --- a/nautobot_device_onboarding/diffsync/models/onboarding_models.py +++ b/nautobot_device_onboarding/diffsync/models/onboarding_models.py @@ -3,6 +3,7 @@ import ipaddress from typing import List, Optional +from diffsync import DiffSyncModel from django.core.exceptions import MultipleObjectsReturned, ObjectDoesNotExist, ValidationError from nautobot.apps.choices import InterfaceTypeChoices, PrefixTypeChoices from nautobot.dcim.models import Device, DeviceType, Interface, Manufacturer, Platform @@ -10,10 +11,10 @@ from nautobot.ipam.models import IPAddress, Prefix from nautobot_ssot.contrib import NautobotModel -from diffsync import DiffSyncModel - class OnboardingDevice(DiffSyncModel): + """Diffsync model for device data.""" + _modelname = "device" _identifiers = ( "location__name", @@ -56,7 +57,7 @@ def _get_or_create_device(cls, platform, 
diffsync, ids, attrs): try: # Only Devices with a primary ip address are loaded from Nautobot when syncing. # If a device is found in Nautobot with a matching name and location as the - # device being created, but the primary ip address doesn't match an ip address entered, + # device being created, but the primary ip address doesn't match an ip address entered, # the matching device will be updated or skipped based on user preference. device = Device.objects.get( @@ -258,7 +259,7 @@ def update(self, attrs): # Update the primary ip address only # The OnboardingNautobotAdapter only loads devices with primary ips matching those - # entered for onboarding. This will not be called unless the adapter is changed to + # entered for onboarding. This will not be called unless the adapter is changed to # include all devices if attrs.get("primary_ip4__host"): if not attrs.get("mask_length"): @@ -278,6 +279,8 @@ def update(self, attrs): class OnboardingDeviceType(NautobotModel): + """Diffsync model for device type data.""" + _modelname = "device_type" _model = DeviceType _identifiers = ("model", "manufacturer__name") @@ -290,6 +293,8 @@ class OnboardingDeviceType(NautobotModel): class OnboardingManufacturer(NautobotModel): + """Diffsync model for manufacturer data.""" + _modelname = "manufacturer" _model = Manufacturer _identifiers = ("name",) @@ -298,6 +303,8 @@ class OnboardingManufacturer(NautobotModel): class OnboardingPlatform(NautobotModel): + """Diffsync model for platform data.""" + _modelname = "platform" _model = Platform _identifiers = ("name",) diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py index a7746191..8c00d8e6 100644 --- a/nautobot_device_onboarding/jobs.py +++ b/nautobot_device_onboarding/jobs.py @@ -1,15 +1,23 @@ """Device Onboarding Jobs.""" + import logging from diffsync.enum import DiffSyncFlags from django.conf import settings from django.templatetags.static import static -from nautobot.apps.jobs import BooleanVar, 
IntegerVar, Job, ObjectVar, StringVar, MultiObjectVar +from nautobot.apps.jobs import BooleanVar, IntegerVar, Job, MultiObjectVar, ObjectVar, StringVar from nautobot.core.celery import register_jobs -from nautobot.dcim.models import DeviceType, Location, Platform, Device +from nautobot.dcim.models import Device, DeviceType, Location, Platform from nautobot.extras.choices import SecretsGroupAccessTypeChoices, SecretsGroupSecretTypeChoices from nautobot.extras.models import Role, SecretsGroup, SecretsGroupAssociation, Status, Tag from nautobot.ipam.models import Namespace +from nautobot_ssot.jobs.base import DataSource +from nornir import InitNornir +from nornir.core.inventory import ConnectionOptions, Defaults, Groups, Host, Hosts, Inventory +from nornir.core.plugins.inventory import InventoryPluginRegister +from nornir.core.task import Result, Task +from nornir_netmiko.tasks import netmiko_send_command + from nautobot_device_onboarding.diffsync.adapters.network_importer_adapters import ( NetworkImporterNautobotAdapter, NetworkImporterNetworkAdapter, @@ -26,12 +34,6 @@ from nautobot_device_onboarding.nornir_plays.logger import NornirLogger from nautobot_device_onboarding.nornir_plays.processor import ProcessorDO from nautobot_device_onboarding.utils.inventory_creator import _set_inventory -from nautobot_ssot.jobs.base import DataSource -from nornir import InitNornir -from nornir.core.inventory import ConnectionOptions, Defaults, Groups, Host, Hosts, Inventory -from nornir.core.plugins.inventory import InventoryPluginRegister -from nornir.core.task import Result, Task -from nornir_netmiko.tasks import netmiko_send_command InventoryPluginRegister.register("empty-inventory", EmptyInventory) @@ -143,9 +145,9 @@ def _onboard(self, address): password=self.password, secret=self.secret, napalm_driver=self.platform.napalm_driver if self.platform and self.platform.napalm_driver else None, - optional_args=self.platform.napalm_args - if self.platform and 
self.platform.napalm_args - else settings.NAPALM_ARGS, + optional_args=( + self.platform.napalm_args if self.platform and self.platform.napalm_args else settings.NAPALM_ARGS + ), ) netdev.get_onboarding_facts() netdev_dict = netdev.get_netdev_dict() @@ -248,8 +250,8 @@ class Meta: update_devices_without_primary_ip = BooleanVar( default=False, description="If a device at the specified location already exists in Nautobot but the primary ip address " - "does not match an ip address entered, update this device with the sync." - ) + "does not match an ip address entered, update this device with the sync.", + ) device_role = ObjectVar( model=Role, query_params={"content_types": "dcim.device"}, @@ -295,28 +297,27 @@ def load_target_adapter(self): self.target_adapter.load() def run( - self, - dryrun, - memory_profiling, - debug, - location, - namespace, - ip_addresses, - management_only_interface, - update_devices_without_primary_ip, - device_role, - device_status, - interface_status, - ip_address_status, - port, - timeout, - secrets_group, - platform, - *args, - **kwargs - ): # pylint:disable=arguments-differ + self, + dryrun, + memory_profiling, + debug, + location, + namespace, + ip_addresses, + management_only_interface, + update_devices_without_primary_ip, + device_role, + device_status, + interface_status, + ip_address_status, + port, + timeout, + secrets_group, + platform, + *args, + **kwargs, + ): # pylint:disable=arguments-differ """Run sync.""" - self.dryrun = dryrun self.memory_profiling = memory_profiling self.debug = debug @@ -352,6 +353,7 @@ def run( } super().run(dryrun, memory_profiling, *args, **kwargs) + class SSOTNetworkImporter(DataSource): """Job syncing extended device attributes into Nautobot.""" @@ -405,18 +407,9 @@ def load_target_adapter(self): self.target_adapter.load() def run( - self, - dryrun, - memory_profiling, - debug, - location, - devices, - device_role, - tag, - *args, - **kwargs): # pylint:disable=arguments-differ + self, dryrun, 
memory_profiling, debug, location, devices, device_role, tag, *args, **kwargs + ): # pylint:disable=arguments-differ """Run sync.""" - self.dryrun = dryrun self.memory_profiling = memory_profiling self.debug = debug @@ -448,24 +441,23 @@ class Meta: # pylint: disable=too-few-public-methods hidden = False debug = BooleanVar() - ip_addresses = StringVar() + ip_addresses = StringVar() port = IntegerVar() timeout = IntegerVar() secrets_group = ObjectVar(model=SecretsGroup) - platform = ObjectVar(model=Platform) + platform = ObjectVar(model=Platform, required=False) - def process_command_getter_result(self, command_result, ip_addresses): - """Process the data returned from CommandGetterDO""" + def _process_result(self, command_result, ip_addresses): + """Process the data returned from devices.""" processed_device_data = {} for ip_address in ip_addresses: processed_device_data[ip_address] = command_result[ip_address] if self.debug: self.logger.debug(f"Processed CommandGetterDO return for {ip_address}: {command_result[ip_address]}") return processed_device_data - + def run(self, *args, **kwargs): """Process onboarding task from ssot-ni job.""" - self.ip_addresses = kwargs["ip_addresses"].replace(" ", "").split(",") self.port = kwargs["port"] self.timeout = kwargs["timeout"] @@ -479,20 +471,22 @@ def run(self, *args, **kwargs): with InitNornir( runner=NORNIR_SETTINGS.get("runner"), logging={"enabled": False}, - inventory={"plugin": "empty-inventory",}, + inventory={ + "plugin": "empty-inventory", + }, ) as nornir_obj: nr_with_processors = nornir_obj.with_processors([ProcessorDO(logger, compiled_results)]) ip_address = self.ip_addresses inventory_constructed = _set_inventory(ip_address, self.platform, self.port, self.secrets_group) nr_with_processors.inventory.hosts.update(inventory_constructed) nr_with_processors.run(task=netmiko_send_commands) - final_result = self.process_command_getter_result(compiled_results, self.ip_addresses) + final_result = 
self._process_result(compiled_results, self.ip_addresses) #### Remove before final merge #### for host, data in nr_with_processors.inventory.hosts.items(): self.logger.info("%s;\n%s", host, data.dict()) #### End #### - + except Exception as err: self.logger.info("Error: %s", err) return err diff --git a/nautobot_device_onboarding/nautobot_keeper.py b/nautobot_device_onboarding/nautobot_keeper.py index 079a5ed7..0de8b094 100644 --- a/nautobot_device_onboarding/nautobot_keeper.py +++ b/nautobot_device_onboarding/nautobot_keeper.py @@ -1,20 +1,17 @@ """Nautobot Keeper.""" -import logging import ipaddress +import logging from django.conf import settings from django.contrib.contenttypes.models import ContentType from django.core.exceptions import ValidationError from nautobot.apps.choices import PrefixTypeChoices from nautobot.dcim.choices import InterfaceTypeChoices -from nautobot.dcim.models import Manufacturer, Device, Interface, DeviceType -from nautobot.extras.models import Role -from nautobot.dcim.models import Platform -from nautobot.dcim.models import Location -from nautobot.extras.models import Status +from nautobot.dcim.models import Device, DeviceType, Interface, Location, Manufacturer, Platform +from nautobot.extras.models import Role, Status from nautobot.extras.models.customfields import CustomField -from nautobot.ipam.models import IPAddress, Prefix, Namespace +from nautobot.ipam.models import IPAddress, Namespace, Prefix from nautobot_device_onboarding.constants import NETMIKO_TO_NAPALM_STATIC from nautobot_device_onboarding.exceptions import OnboardException diff --git a/nautobot_device_onboarding/netdev_keeper.py b/nautobot_device_onboarding/netdev_keeper.py index 89f64471..f289f9ab 100644 --- a/nautobot_device_onboarding/netdev_keeper.py +++ b/nautobot_device_onboarding/netdev_keeper.py @@ -6,18 +6,15 @@ from django.conf import settings from napalm import get_network_driver -from napalm.base.exceptions import ConnectionException, CommandErrorException 
+from napalm.base.exceptions import CommandErrorException, ConnectionException from napalm.base.netmiko_helpers import netmiko_args -from netmiko import SSHDetect -from netmiko import NetMikoAuthenticationException -from netmiko import NetMikoTimeoutException -from paramiko.ssh_exception import SSHException - from nautobot.dcim.models import Platform +from netmiko import NetMikoAuthenticationException, NetMikoTimeoutException, SSHDetect +from paramiko.ssh_exception import SSHException -from nautobot_device_onboarding.onboarding.onboarding import StandaloneOnboarding from nautobot_device_onboarding.constants import NETMIKO_TO_NAPALM_STATIC from nautobot_device_onboarding.exceptions import OnboardException +from nautobot_device_onboarding.onboarding.onboarding import StandaloneOnboarding logger = logging.getLogger("rq.worker") diff --git a/nautobot_device_onboarding/nornir_plays/command_getter.py b/nautobot_device_onboarding/nornir_plays/command_getter.py index 91b4eb44..5fb42f12 100755 --- a/nautobot_device_onboarding/nornir_plays/command_getter.py +++ b/nautobot_device_onboarding/nornir_plays/command_getter.py @@ -1,8 +1,12 @@ -from nautobot_device_onboarding.constants import PLATFORM_COMMAND_MAP +"""Command Getter.""" + from nornir.core.task import Task from nornir_netmiko.tasks import netmiko_send_command +from nautobot_device_onboarding.constants import PLATFORM_COMMAND_MAP + def netmiko_send_commands(task: Task): + """Run commands specified in PLATFORM_COMMAND_MAP.""" for command in PLATFORM_COMMAND_MAP.get(task.host.platform, "default"): task.run(task=netmiko_send_command, name=command, command_string=command, use_textfsm=True) diff --git a/nautobot_device_onboarding/nornir_plays/empty_inventory.py b/nautobot_device_onboarding/nornir_plays/empty_inventory.py index 8d13a0ae..460717d2 100755 --- a/nautobot_device_onboarding/nornir_plays/empty_inventory.py +++ b/nautobot_device_onboarding/nornir_plays/empty_inventory.py @@ -1,4 +1,5 @@ """Empty Nornir Inventory 
Plugin.""" + from nornir.core.inventory import Defaults, Groups, Hosts, Inventory diff --git a/nautobot_device_onboarding/nornir_plays/logger.py b/nautobot_device_onboarding/nornir_plays/logger.py index 4fb31b04..a7975c3c 100755 --- a/nautobot_device_onboarding/nornir_plays/logger.py +++ b/nautobot_device_onboarding/nornir_plays/logger.py @@ -1,4 +1,5 @@ """Custom logger to support writing to console and db.""" + import logging from typing import Any @@ -43,4 +44,4 @@ def error(self, message: str, extra: Any = None): def critical(self, message: str, extra: Any = None): """Match standard Python Library critical signature.""" - self._logging_helper("critical", message, extra) \ No newline at end of file + self._logging_helper("critical", message, extra) diff --git a/nautobot_device_onboarding/nornir_plays/processor.py b/nautobot_device_onboarding/nornir_plays/processor.py index 0ac71ebe..428def3f 100755 --- a/nautobot_device_onboarding/nornir_plays/processor.py +++ b/nautobot_device_onboarding/nornir_plays/processor.py @@ -1,10 +1,12 @@ """Processor used by Device Onboarding to catch unknown errors.""" from typing import Dict + from nornir.core.inventory import Host from nornir.core.task import AggregatedResult, MultiResult, Task from nornir_nautobot.exceptions import NornirNautobotException from nornir_nautobot.plugins.processors import BaseLoggingProcessor + from nautobot_device_onboarding.utils.formatter import format_ob_data_ios, format_ob_data_nxos @@ -17,12 +19,14 @@ def __init__(self, logger, command_outputs): self.data: Dict = command_outputs def task_started(self, task: Task) -> None: + """Boilerplate Nornir processor for task_started.""" self.data[task.name] = {} - #self.data[task.name]["started"] = True + # self.data[task.name]["started"] = True self.logger.info(f"Task Name: {task.name} started") def task_completed(self, task: Task, result: AggregatedResult) -> None: - #self.data[task.name]["completed"] = True + """Boilerplate Nornir processor for 
task_instance_completed.""" + # self.data[task.name]["completed"] = True self.logger.info(f"Task Name: {task.name} completed") def task_instance_started(self, task: Task, host: Host) -> None: @@ -46,7 +50,7 @@ def task_instance_completed(self, task: Task, host: Host, result: MultiResult) - if result.failed: for level_1_result in result: if hasattr(level_1_result, "exception") and hasattr(level_1_result.exception, "result"): - for level_2_result in level_1_result.exception.result: # type: ignore + for level_2_result in level_1_result.exception.result: # type: ignore if isinstance(level_2_result.exception, NornirNautobotException): return self.logger.critical(f"{task.name} failed: {result.exception}", extra={"object": task.host}) @@ -62,7 +66,7 @@ def subtask_instance_completed(self, task: Task, host: Host, result: MultiResult """Processor for Logging on SubTask Completed.""" self.logger.info(f"Subtask completed {task.name}.", extra={"object": task.host}) self.logger.info(f"Subtask result {result.result}.", extra={"object": task.host}) - + self.data[task.name][host.name] = { "failed": result.failed, "subtask_result": result.result, @@ -95,4 +99,4 @@ def subtask_instance_started(self, task: Task, host: Host) -> None: """Processor for Logging on SubTask Start.""" self.logger.info(f"Subtask starting {task.name}.", extra={"object": task.host}) self.data[task.name] = {} - #self.data[task.name][host.name] = {"started": True} + # self.data[task.name][host.name] = {"started": True} diff --git a/nautobot_device_onboarding/utils/formatter.py b/nautobot_device_onboarding/utils/formatter.py index 9068cdc1..f16131fc 100644 --- a/nautobot_device_onboarding/utils/formatter.py +++ b/nautobot_device_onboarding/utils/formatter.py @@ -1,3 +1,6 @@ +"""Formatter.""" + + def format_ob_data_ios(host, result): """Format the data for onboarding IOS devices.""" primary_ip4 = host.name diff --git a/nautobot_device_onboarding/utils/inventory_creator.py 
b/nautobot_device_onboarding/utils/inventory_creator.py index 310b789b..c8a92dcf 100755 --- a/nautobot_device_onboarding/utils/inventory_creator.py +++ b/nautobot_device_onboarding/utils/inventory_creator.py @@ -2,10 +2,11 @@ from django.conf import settings from nautobot.extras.choices import SecretsGroupAccessTypeChoices, SecretsGroupSecretTypeChoices -from nautobot_device_onboarding.exceptions import OnboardException from netmiko import SSHDetect from nornir.core.inventory import ConnectionOptions, Host +from nautobot_device_onboarding.exceptions import OnboardException + def _parse_credentials(credentials): """Parse and return dictionary of credentials.""" @@ -46,8 +47,7 @@ def guess_netmiko_device_type(hostname, username, password, port): "host": hostname, "username": username, "password": password, - "port": port - **netmiko_optional_args, + "port": port**netmiko_optional_args, } try: From 673cff79a55b6cd1d6153840791a2d99349d87e5 Mon Sep 17 00:00:00 2001 From: David Cates Date: Mon, 29 Jan 2024 17:24:35 -0700 Subject: [PATCH 025/225] black and flake 8, some form description updates --- .../diffsync/adapters/onboarding_adapters.py | 8 +++---- .../models/network_importer_models.py | 2 +- .../diffsync/models/onboarding_models.py | 4 ++-- nautobot_device_onboarding/jobs.py | 24 ++++++++----------- .../migrations/0001_initial.py | 5 ++-- .../0001_squash__0001_0004_0005_0006.py | 2 +- .../0004_migrate_to_extras_role_part_1.py | 1 - .../tests/test_basic.py | 4 +++- .../tests/test_nautobot_keeper.py | 8 ++++--- .../tests/test_onboarding.py | 1 + .../utils/inventory_creator.py | 6 ++--- 11 files changed, 33 insertions(+), 32 deletions(-) diff --git a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py index 1d09e817..22587dfe 100644 --- a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py @@ 
-59,7 +59,7 @@ def load_manufacturers(self): """Load manufacturer data from Nautobot.""" for manufacturer in Manufacturer.objects.all(): if self.job.debug: - self.job.logger.debug(f"Loading Manufacturer data from Nautobot...") + self.job.logger.debug("Loading Manufacturer data from Nautobot...") onboarding_manufacturer = self.manufacturer(diffsync=self, name=manufacturer.name) self.add(onboarding_manufacturer) if self.job.debug: @@ -68,7 +68,7 @@ def load_manufacturers(self): def load_platforms(self): """Load platform data from Nautobot.""" if self.job.debug: - self.job.logger.debug(f"Loading Platform data from Nautobot...") + self.job.logger.debug("Loading Platform data from Nautobot...") for platform in Platform.objects.all(): onboarding_platform = self.platform( diffsync=self, @@ -83,7 +83,7 @@ def load_platforms(self): def load_device_types(self): """Load device type data from Nautobot.""" if self.job.debug: - self.job.logger.debug(f"Loading DeviceType data from Nautobot...") + self.job.logger.debug("Loading DeviceType data from Nautobot...") for device_type in DeviceType.objects.all(): onboarding_device_type = self.device_type( diffsync=self, @@ -98,7 +98,7 @@ def load_device_types(self): def load_devices(self): """Load device data from Nautobot.""" if self.job.debug: - self.job.logger.debug(f"Loading Device data from Nautobot...") + self.job.logger.debug("Loading Device data from Nautobot...") # for device in Device.objects.filter(primary_ip4__host__in=self.job.ip_addresses): for device in Device.objects.all(): diff --git a/nautobot_device_onboarding/diffsync/models/network_importer_models.py b/nautobot_device_onboarding/diffsync/models/network_importer_models.py index f30d0c7d..6f4fdf35 100644 --- a/nautobot_device_onboarding/diffsync/models/network_importer_models.py +++ b/nautobot_device_onboarding/diffsync/models/network_importer_models.py @@ -1,6 +1,6 @@ """Diffsync models.""" -from typing import List, Optional +from typing import List from diffsync 
import DiffSync from nautobot.dcim.models import Device, Interface diff --git a/nautobot_device_onboarding/diffsync/models/onboarding_models.py b/nautobot_device_onboarding/diffsync/models/onboarding_models.py index c94dd3ca..1f45ac83 100644 --- a/nautobot_device_onboarding/diffsync/models/onboarding_models.py +++ b/nautobot_device_onboarding/diffsync/models/onboarding_models.py @@ -1,7 +1,7 @@ """Diffsync models.""" import ipaddress -from typing import List, Optional +from typing import Optional from diffsync import DiffSyncModel from django.core.exceptions import MultipleObjectsReturned, ObjectDoesNotExist, ValidationError @@ -111,7 +111,7 @@ def _get_or_create_ip_address(cls, diffsync, attrs): namespace=diffsync.job.namespace, status=diffsync.job.ip_address_status, ) - except ValidationError as err: + except ValidationError: diffsync.job.logger.warning( f"No suitable parent Prefix exists for IP {attrs['primary_ip4__host']} in " f"Namespace {diffsync.job.namespace.name}, a new Prefix will be created." 
diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py index 8c00d8e6..2814f83d 100644 --- a/nautobot_device_onboarding/jobs.py +++ b/nautobot_device_onboarding/jobs.py @@ -4,7 +4,6 @@ from diffsync.enum import DiffSyncFlags from django.conf import settings -from django.templatetags.static import static from nautobot.apps.jobs import BooleanVar, IntegerVar, Job, MultiObjectVar, ObjectVar, StringVar from nautobot.core.celery import register_jobs from nautobot.dcim.models import Device, DeviceType, Location, Platform @@ -13,10 +12,7 @@ from nautobot.ipam.models import Namespace from nautobot_ssot.jobs.base import DataSource from nornir import InitNornir -from nornir.core.inventory import ConnectionOptions, Defaults, Groups, Host, Hosts, Inventory from nornir.core.plugins.inventory import InventoryPluginRegister -from nornir.core.task import Result, Task -from nornir_netmiko.tasks import netmiko_send_command from nautobot_device_onboarding.diffsync.adapters.network_importer_adapters import ( NetworkImporterNautobotAdapter, @@ -233,11 +229,11 @@ class Meta: location = ObjectVar( model=Location, query_params={"content_type": "dcim.device"}, - description="Assigned Location for the onboarded device(s)", + description="Assigned Location for all synced device(s)", ) namespace = ObjectVar(model=Namespace, description="Namespace ip addresses belong to.") ip_addresses = StringVar( - description="IP address of the device to onboard, specify in a comma separated list for multiple devices.", + description="IP address of the device to sync, specify in a comma separated list for multiple devices.", label="IPv4 addresses", ) port = IntegerVar(default=22) @@ -245,7 +241,7 @@ class Meta: management_only_interface = BooleanVar( default=False, label="Set Management Only", - description="If True, interfaces that are created or updated will be set to management only. 
If False, the interface will be set to not be management only.", + description="If True, new interfaces that are created will be set to management only. If False, new interfaces will be set to not be management only.", ) update_devices_without_primary_ip = BooleanVar( default=False, @@ -256,26 +252,26 @@ class Meta: model=Role, query_params={"content_types": "dcim.device"}, required=True, - description="Role to be applied to all new onboarded devices", + description="Role to be applied to all synced devices", ) device_status = ObjectVar( model=Status, query_params={"content_types": "dcim.device"}, required=True, - description="Status to be applied to all new onboarded devices", + description="Status to be applied to all synced devices", ) interface_status = ObjectVar( model=Status, query_params={"content_types": "dcim.interface"}, required=True, - description="Status to be applied to all new onboarded device interfaces", + description="Status to be applied to all new synced device interfaces. This value does not update with additional syncs.", ) ip_address_status = ObjectVar( label="IP address status", model=Status, query_params={"content_types": "ipam.ipaddress"}, required=True, - description="Status to be applied to all new onboarded IP addresses.", + description="Status to be applied to all new synced IP addresses. This value does not update with additional syncs.", ) secrets_group = ObjectVar( model=SecretsGroup, required=True, description="SecretsGroup for device connection credentials." 
@@ -342,7 +338,7 @@ def run( "ip_addresses": ip_addresses, "management_only_interface": management_only_interface, "update_devices_without_primary_ip": update_devices_without_primary_ip, - "update_devices_without_primary_ip": device_role, + "device_role": device_role, "device_status": device_status, "interface_status": interface_status, "ip_address_status": ip_address_status, @@ -482,10 +478,10 @@ def run(self, *args, **kwargs): nr_with_processors.run(task=netmiko_send_commands) final_result = self._process_result(compiled_results, self.ip_addresses) - #### Remove before final merge #### + # Remove before final merge # for host, data in nr_with_processors.inventory.hosts.items(): self.logger.info("%s;\n%s", host, data.dict()) - #### End #### + # End # except Exception as err: self.logger.info("Error: %s", err) diff --git a/nautobot_device_onboarding/migrations/0001_initial.py b/nautobot_device_onboarding/migrations/0001_initial.py index db85e40b..0c555c5a 100644 --- a/nautobot_device_onboarding/migrations/0001_initial.py +++ b/nautobot_device_onboarding/migrations/0001_initial.py @@ -1,9 +1,10 @@ # Generated by Django 3.1.3 on 2021-02-22 03:40 -from django.db import migrations, models -import django.db.models.deletion import uuid +import django.db.models.deletion +from django.db import migrations, models + class Migration(migrations.Migration): initial = True diff --git a/nautobot_device_onboarding/migrations/0001_squash__0001_0004_0005_0006.py b/nautobot_device_onboarding/migrations/0001_squash__0001_0004_0005_0006.py index 8e3cb92d..5e9ccf3f 100644 --- a/nautobot_device_onboarding/migrations/0001_squash__0001_0004_0005_0006.py +++ b/nautobot_device_onboarding/migrations/0001_squash__0001_0004_0005_0006.py @@ -2,8 +2,8 @@ import uuid -from django.db import migrations, models import django.db.models.deletion +from django.db import migrations, models from nautobot.extras.models import RoleField diff --git 
a/nautobot_device_onboarding/migrations/0004_migrate_to_extras_role_part_1.py b/nautobot_device_onboarding/migrations/0004_migrate_to_extras_role_part_1.py index 5f531fe2..7daeb497 100644 --- a/nautobot_device_onboarding/migrations/0004_migrate_to_extras_role_part_1.py +++ b/nautobot_device_onboarding/migrations/0004_migrate_to_extras_role_part_1.py @@ -1,5 +1,4 @@ from django.db import migrations, models - from nautobot.extras.models import RoleField diff --git a/nautobot_device_onboarding/tests/test_basic.py b/nautobot_device_onboarding/tests/test_basic.py index e0fd647d..efdafbb0 100644 --- a/nautobot_device_onboarding/tests/test_basic.py +++ b/nautobot_device_onboarding/tests/test_basic.py @@ -1,6 +1,8 @@ """Basic tests that do not require Django.""" -import unittest + import os +import unittest + import toml from nautobot_device_onboarding import __version__ as project_version diff --git a/nautobot_device_onboarding/tests/test_nautobot_keeper.py b/nautobot_device_onboarding/tests/test_nautobot_keeper.py index 458545f4..5cfe8cd3 100644 --- a/nautobot_device_onboarding/tests/test_nautobot_keeper.py +++ b/nautobot_device_onboarding/tests/test_nautobot_keeper.py @@ -1,13 +1,15 @@ """Unit tests for nautobot_device_onboarding.onboard module and its classes.""" + from django.conf import settings from django.contrib.contenttypes.models import ContentType from django.test import TestCase from nautobot.dcim.choices import InterfaceTypeChoices -from nautobot.dcim.models import Location, LocationType, Manufacturer, DeviceType, Device, Interface, Platform -from nautobot.extras.models import Role, Status, CustomField -from nautobot.ipam.models import IPAddress +from nautobot.dcim.models import Device, DeviceType, Interface, Location, LocationType, Manufacturer, Platform from nautobot.extras.choices import CustomFieldTypeChoices +from nautobot.extras.models import CustomField, Role, Status from nautobot.extras.models.secrets import SecretsGroup +from nautobot.ipam.models 
import IPAddress + from nautobot_device_onboarding.exceptions import OnboardException from nautobot_device_onboarding.nautobot_keeper import NautobotKeeper diff --git a/nautobot_device_onboarding/tests/test_onboarding.py b/nautobot_device_onboarding/tests/test_onboarding.py index f427bba2..d5167c0b 100644 --- a/nautobot_device_onboarding/tests/test_onboarding.py +++ b/nautobot_device_onboarding/tests/test_onboarding.py @@ -1,4 +1,5 @@ """Unit tests for nautobot_device_onboarding.netdev_keeper module and its classes.""" + # from unittest import mock from django.conf import settings diff --git a/nautobot_device_onboarding/utils/inventory_creator.py b/nautobot_device_onboarding/utils/inventory_creator.py index c8a92dcf..c530fa70 100755 --- a/nautobot_device_onboarding/utils/inventory_creator.py +++ b/nautobot_device_onboarding/utils/inventory_creator.py @@ -25,7 +25,7 @@ def _parse_credentials(credentials): access_type=SecretsGroupAccessTypeChoices.TYPE_GENERIC, secret_type=SecretsGroupSecretTypeChoices.TYPE_SECRET, ) - except: + except Exception: secret = None except Exception as err: raise OnboardException("fail-credentials - Unable to parse selected credentials.") from err @@ -40,14 +40,14 @@ def guess_netmiko_device_type(hostname, username, password, port): """Guess the device type of host, based on Netmiko.""" guessed_device_type = None - netmiko_optional_args = {} + netmiko_optional_args = {"port": port} remote_device = { "device_type": "autodetect", "host": hostname, "username": username, "password": password, - "port": port**netmiko_optional_args, + **netmiko_optional_args, } try: From e6738811840dd50f9d1c080c146fa98e2abe1315 Mon Sep 17 00:00:00 2001 From: David Cates Date: Mon, 29 Jan 2024 17:45:20 -0700 Subject: [PATCH 026/225] update nautobot version in ci.yml --- .github/workflows/ci.yml | 6 +-- nautobot_device_onboarding/jobs.py | 4 +- poetry.lock | 78 +++++++++++++++--------------- 3 files changed, 45 insertions(+), 43 deletions(-) diff --git 
a/.github/workflows/ci.yml b/.github/workflows/ci.yml index d9c7844b..d51c9f35 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -95,7 +95,7 @@ jobs: fail-fast: true matrix: python-version: ["3.11"] - nautobot-version: ["2.0.3"] + nautobot-version: ["2.1.1"] env: INVOKE_NAUTOBOT_DEVICE_ONBOARDING_PYTHON_VER: "${{ matrix.python-version }}" INVOKE_NAUTOBOT_DEVICE_ONBOARDING_NAUTOBOT_VER: "${{ matrix.nautobot-version }}" @@ -138,7 +138,7 @@ jobs: fail-fast: true matrix: python-version: ["3.11"] - nautobot-version: ["2.0.3"] + nautobot-version: ["2.1.1"] env: INVOKE_NAUTOBOT_DEVICE_ONBOARDING_PYTHON_VER: "${{ matrix.python-version }}" INVOKE_NAUTOBOT_DEVICE_ONBOARDING_NAUTOBOT_VER: "${{ matrix.nautobot-version }}" @@ -181,7 +181,7 @@ jobs: include: - python-version: "3.11" db-backend: "postgresql" - nautobot-version: "2.0.3" + nautobot-version: "2.1.1" # - python-version: "3.11" # db-backend: "mysql" # nautobot-version: "stable" diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py index 2814f83d..93c4b984 100644 --- a/nautobot_device_onboarding/jobs.py +++ b/nautobot_device_onboarding/jobs.py @@ -252,13 +252,13 @@ class Meta: model=Role, query_params={"content_types": "dcim.device"}, required=True, - description="Role to be applied to all synced devices", + description="Role to be applied to all synced devices.", ) device_status = ObjectVar( model=Status, query_params={"content_types": "dcim.device"}, required=True, - description="Status to be applied to all synced devices", + description="Status to be applied to all synced devices.", ) interface_status = ObjectVar( model=Status, diff --git a/poetry.lock b/poetry.lock index 427076dd..702597a5 100644 --- a/poetry.lock +++ b/poetry.lock @@ -220,33 +220,33 @@ files = [ [[package]] name = "black" -version = "24.1.0" +version = "24.1.1" description = "The uncompromising code formatter." 
optional = false python-versions = ">=3.8" files = [ - {file = "black-24.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:94d5280d020dadfafc75d7cae899609ed38653d3f5e82e7ce58f75e76387ed3d"}, - {file = "black-24.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:aaf9aa85aaaa466bf969e7dd259547f4481b712fe7ee14befeecc152c403ee05"}, - {file = "black-24.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec489cae76eac3f7573629955573c3a0e913641cafb9e3bfc87d8ce155ebdb29"}, - {file = "black-24.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:a5a0100b4bdb3744dd68412c3789f472d822dc058bb3857743342f8d7f93a5a7"}, - {file = "black-24.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6cc5a6ba3e671cfea95a40030b16a98ee7dc2e22b6427a6f3389567ecf1b5262"}, - {file = "black-24.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e0e367759062dcabcd9a426d12450c6d61faf1704a352a49055a04c9f9ce8f5a"}, - {file = "black-24.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be305563ff4a2dea813f699daaffac60b977935f3264f66922b1936a5e492ee4"}, - {file = "black-24.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:6a8977774929b5db90442729f131221e58cc5d8208023c6af9110f26f75b6b20"}, - {file = "black-24.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d74d4d0da276fbe3b95aa1f404182562c28a04402e4ece60cf373d0b902f33a0"}, - {file = "black-24.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:39addf23f7070dbc0b5518cdb2018468ac249d7412a669b50ccca18427dba1f3"}, - {file = "black-24.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:827a7c0da520dd2f8e6d7d3595f4591aa62ccccce95b16c0e94bb4066374c4c2"}, - {file = "black-24.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:0cd59d01bf3306ff7e3076dd7f4435fcd2fafe5506a6111cae1138fc7de52382"}, - {file = "black-24.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bf8dd261ee82df1abfb591f97e174345ab7375a55019cc93ad38993b9ff5c6ad"}, - {file = 
"black-24.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:82d9452aeabd51d1c8f0d52d4d18e82b9f010ecb30fd55867b5ff95904f427ff"}, - {file = "black-24.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9aede09f72b2a466e673ee9fca96e4bccc36f463cac28a35ce741f0fd13aea8b"}, - {file = "black-24.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:780f13d03066a7daf1707ec723fdb36bd698ffa29d95a2e7ef33a8dd8fe43b5c"}, - {file = "black-24.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a15670c650668399c4b5eae32e222728185961d6ef6b568f62c1681d57b381ba"}, - {file = "black-24.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1e0fa70b8464055069864a4733901b31cbdbe1273f63a24d2fa9d726723d45ac"}, - {file = "black-24.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7fa8d9aaa22d846f8c0f7f07391148e5e346562e9b215794f9101a8339d8b6d8"}, - {file = "black-24.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:f0dfbfbacfbf9cd1fac7a5ddd3e72510ffa93e841a69fcf4a6358feab1685382"}, - {file = "black-24.1.0-py3-none-any.whl", hash = "sha256:5134a6f6b683aa0a5592e3fd61dd3519d8acd953d93e2b8b76f9981245b65594"}, - {file = "black-24.1.0.tar.gz", hash = "sha256:30fbf768cd4f4576598b1db0202413fafea9a227ef808d1a12230c643cefe9fc"}, + {file = "black-24.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2588021038bd5ada078de606f2a804cadd0a3cc6a79cb3e9bb3a8bf581325a4c"}, + {file = "black-24.1.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1a95915c98d6e32ca43809d46d932e2abc5f1f7d582ffbe65a5b4d1588af7445"}, + {file = "black-24.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fa6a0e965779c8f2afb286f9ef798df770ba2b6cee063c650b96adec22c056a"}, + {file = "black-24.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:5242ecd9e990aeb995b6d03dc3b2d112d4a78f2083e5a8e86d566340ae80fec4"}, + {file = "black-24.1.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fc1ec9aa6f4d98d022101e015261c056ddebe3da6a8ccfc2c792cbe0349d48b7"}, + {file = 
"black-24.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0269dfdea12442022e88043d2910429bed717b2d04523867a85dacce535916b8"}, + {file = "black-24.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3d64db762eae4a5ce04b6e3dd745dcca0fb9560eb931a5be97472e38652a161"}, + {file = "black-24.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:5d7b06ea8816cbd4becfe5f70accae953c53c0e53aa98730ceccb0395520ee5d"}, + {file = "black-24.1.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e2c8dfa14677f90d976f68e0c923947ae68fa3961d61ee30976c388adc0b02c8"}, + {file = "black-24.1.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a21725862d0e855ae05da1dd25e3825ed712eaaccef6b03017fe0853a01aa45e"}, + {file = "black-24.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:07204d078e25327aad9ed2c64790d681238686bce254c910de640c7cc4fc3aa6"}, + {file = "black-24.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:a83fe522d9698d8f9a101b860b1ee154c1d25f8a82ceb807d319f085b2627c5b"}, + {file = "black-24.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:08b34e85170d368c37ca7bf81cf67ac863c9d1963b2c1780c39102187ec8dd62"}, + {file = "black-24.1.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7258c27115c1e3b5de9ac6c4f9957e3ee2c02c0b39222a24dc7aa03ba0e986f5"}, + {file = "black-24.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40657e1b78212d582a0edecafef133cf1dd02e6677f539b669db4746150d38f6"}, + {file = "black-24.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:e298d588744efda02379521a19639ebcd314fba7a49be22136204d7ed1782717"}, + {file = "black-24.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:34afe9da5056aa123b8bfda1664bfe6fb4e9c6f311d8e4a6eb089da9a9173bf9"}, + {file = "black-24.1.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:854c06fb86fd854140f37fb24dbf10621f5dab9e3b0c29a690ba595e3d543024"}, + {file = "black-24.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:3897ae5a21ca132efa219c029cce5e6bfc9c3d34ed7e892113d199c0b1b444a2"}, + {file = "black-24.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:ecba2a15dfb2d97105be74bbfe5128bc5e9fa8477d8c46766505c1dda5883aac"}, + {file = "black-24.1.1-py3-none-any.whl", hash = "sha256:5cdc2e2195212208fbcae579b931407c1fa9997584f0a415421748aeafff1168"}, + {file = "black-24.1.1.tar.gz", hash = "sha256:48b5760dcbfe5cf97fd4fba23946681f3a81514c6ab8a45b50da67ac8fbc6c7b"}, ] [package.dependencies] @@ -634,12 +634,13 @@ toml = ["tomli"] [[package]] name = "cron-descriptor" -version = "1.4.0" +version = "1.4.3" description = "A Python library that converts cron expressions into human readable strings." optional = false python-versions = "*" files = [ - {file = "cron_descriptor-1.4.0.tar.gz", hash = "sha256:b6ff4e3a988d7ca04a4ab150248e9f166fb7a5c828a85090e75bcc25aa93b4dd"}, + {file = "cron_descriptor-1.4.3-py3-none-any.whl", hash = "sha256:a67ba21804983b1427ed7f3e1ec27ee77bf24c652b0430239c268c5ddfbf9dc0"}, + {file = "cron_descriptor-1.4.3.tar.gz", hash = "sha256:7b1a00d7d25d6ae6896c0da4457e790b98cba778398a3d48e341e5e0d33f0488"}, ] [package.extras] @@ -733,17 +734,18 @@ redis = ["redis (>=4.3,<5.0)"] [[package]] name = "dill" -version = "0.3.7" +version = "0.3.8" description = "serialize all of Python" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "dill-0.3.7-py3-none-any.whl", hash = "sha256:76b122c08ef4ce2eedcd4d1abd8e641114bfc6c2867f49f3c41facf65bf19f5e"}, - {file = "dill-0.3.7.tar.gz", hash = "sha256:cc1c8b182eb3013e24bd475ff2e9295af86c1a38eb1aff128dac8962a9ce3c03"}, + {file = "dill-0.3.8-py3-none-any.whl", hash = "sha256:c36ca9ffb54365bdd2f8eb3eff7d2a21237f8452b57ace88b1ac615b7e815bd7"}, + {file = "dill-0.3.8.tar.gz", hash = "sha256:3ebe3c479ad625c4553aca177444d89b486b1d84982eeacded644afc0cf797ca"}, ] [package.extras] graph = ["objgraph (>=1.7.2)"] +profile = ["gprof2dot (>=2022.7.29)"] [[package]] name = "django" @@ -2343,13 +2345,13 @@ nornir = 
">=3,<4" [[package]] name = "ntc-templates" -version = "4.1.0" +version = "4.2.0" description = "TextFSM Templates for Network Devices, and Python wrapper for TextFSM's CliTable." optional = false python-versions = ">=3.8,<4.0" files = [ - {file = "ntc_templates-4.1.0-py3-none-any.whl", hash = "sha256:61acf390ac22ee87c82c3923ea7cda8b2918f6321973de3b7878beedc2818cb1"}, - {file = "ntc_templates-4.1.0.tar.gz", hash = "sha256:c4985893f347852e1ddbdf8205c098fb23d837185020b4f7f909a547695794df"}, + {file = "ntc_templates-4.2.0-py3-none-any.whl", hash = "sha256:f41471c1375c1a86bb5958358339efe9e95d908ea33866125adafe36fbfe11dd"}, + {file = "ntc_templates-4.2.0.tar.gz", hash = "sha256:a06c0e786aa3aea429d345ea67f59cb6da43557c31aa65914969d0cd6b0c0dde"}, ] [package.dependencies] @@ -3029,13 +3031,13 @@ postgresql = ["psycopg2"] [[package]] name = "pytz" -version = "2023.3.post1" +version = "2023.4" description = "World timezone definitions, modern and historical" optional = false python-versions = "*" files = [ - {file = "pytz-2023.3.post1-py2.py3-none-any.whl", hash = "sha256:ce42d816b81b68506614c11e8937d3aa9e41007ceb50bfdcb0749b921bf646c7"}, - {file = "pytz-2023.3.post1.tar.gz", hash = "sha256:7b4fddbeb94a1eba4b557da24f19fdf9db575192544270a9101d8509f9f43d7b"}, + {file = "pytz-2023.4-py2.py3-none-any.whl", hash = "sha256:f90ef520d95e7c46951105338d918664ebfd6f1d995bd7d153127ce90efafa6a"}, + {file = "pytz-2023.4.tar.gz", hash = "sha256:31d4583c4ed539cd037956140d695e42c033a19e984bfce9964a3f7d59bc2b40"}, ] [[package]] @@ -3183,13 +3185,13 @@ ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==20.0.1)", "requests (>=2.26.0)" [[package]] name = "referencing" -version = "0.32.1" +version = "0.33.0" description = "JSON Referencing + Python" optional = false python-versions = ">=3.8" files = [ - {file = "referencing-0.32.1-py3-none-any.whl", hash = "sha256:7e4dc12271d8e15612bfe35792f5ea1c40970dadf8624602e33db2758f7ee554"}, - {file = "referencing-0.32.1.tar.gz", hash = 
"sha256:3c57da0513e9563eb7e203ebe9bb3a1b509b042016433bd1e45a2853466c3dd3"}, + {file = "referencing-0.33.0-py3-none-any.whl", hash = "sha256:39240f2ecc770258f28b642dd47fd74bc8b02484de54e1882b74b35ebd779bd5"}, + {file = "referencing-0.33.0.tar.gz", hash = "sha256:c775fedf74bc0f9189c2a3be1c12fd03e8c23f4d371dce795df44e06c5b412f7"}, ] [package.dependencies] From ebe860c587dda89bc8d64577aa7654aa1728b424 Mon Sep 17 00:00:00 2001 From: Jan Snasel Date: Tue, 30 Jan 2024 12:07:32 +0000 Subject: [PATCH 027/225] fix: Drift management --- .cookiecutter.json | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.cookiecutter.json b/.cookiecutter.json index 55941943..c41fcdfa 100644 --- a/.cookiecutter.json +++ b/.cookiecutter.json @@ -4,11 +4,11 @@ "full_name": "Network to Code, LLC", "email": "info@networktocode.com", "github_org": "nautobot", - "plugin_name": "nautobot_device_onboarding", + "app_name": "nautobot_device_onboarding", "verbose_name": "Device Onboarding", - "plugin_slug": "nautobot-device-onboarding", - "project_slug": "nautobot-plugin-device-onboarding", - "repo_url": "https://github.com/nautobot/nautobot-plugin-device-onboarding", + "app_slug": "nautobot-device-onboarding", + "project_slug": "nautobot-app-device-onboarding", + "repo_url": "https://github.com/nautobot/nautobot-app-device-onboarding", "base_url": "nautobot-device-onboarding", "min_nautobot_version": "2.0.3", "max_nautobot_version": "2.9999", From 205f68ee91025213990316fdff0b4ae3d678a009 Mon Sep 17 00:00:00 2001 From: David Cates Date: Tue, 30 Jan 2024 09:25:35 -0700 Subject: [PATCH 028/225] pylint --- nautobot_device_onboarding/__init__.py | 6 ++++-- .../adapters/network_importer_adapters.py | 7 +++---- .../diffsync/adapters/onboarding_adapters.py | 11 +++++------ nautobot_device_onboarding/jobs.py | 16 ++++++++-------- 4 files changed, 20 insertions(+), 20 deletions(-) diff --git a/nautobot_device_onboarding/__init__.py b/nautobot_device_onboarding/__init__.py index 
37411845..311735e1 100644 --- a/nautobot_device_onboarding/__init__.py +++ b/nautobot_device_onboarding/__init__.py @@ -1,6 +1,7 @@ """App declaration for nautobot_device_onboarding.""" -# Metadata is inherited from Nautobot. If not including Nautobot in the environment, this should be added +# Metadata is inherited from Nautobot +# If not including Nautobot in the environment, this should be added from importlib import metadata __version__ = metadata.version(__name__) @@ -15,7 +16,8 @@ class NautobotDeviceOnboardingConfig(NautobotAppConfig): verbose_name = "Device Onboarding" version = __version__ author = "Network to Code, LLC" - description = "Nautobot App that simplifies device onboarding (and re-onboarding) by collecting and populating common device 'facts' into Nautobot." + description = "Nautobot App that simplifies device onboarding (and re-onboarding) by \ + collecting and populating common device 'facts' into Nautobot." base_url = "nautobot-device-onboarding" required_settings = [] min_version = "2.1.1" diff --git a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py index 3e7a7f64..7dece7db 100644 --- a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py @@ -42,13 +42,13 @@ class FilteredNautobotAdapter(NautobotAdapter): Must be used with FilteredNautobotModel. 
""" - def _load_objects(self, diffsync_model): + def _load_objects(self, diffsync_model): # pylint: disable=protected-access """Given a diffsync model class, load a list of models from the database and return them.""" parameter_names = self._get_parameter_names(diffsync_model) - for database_object in diffsync_model._get_queryset(diffsync=self): + for database_object in diffsync_model._get_queryset(diffsync=self): # pylint: disable=protected-access self.job.logger.debug( f"LOADING: Database Object: {database_object}, " - f"Model Name: {diffsync_model._modelname}, " + f"Model Name: {diffsync_model._modelname}, " # pylint: disable=protected-access f"Parameter Names: {parameter_names}" ) self._load_single_object(database_object, diffsync_model, parameter_names) @@ -82,7 +82,6 @@ def __init__(self, *args, job, sync=None, **kwargs): def load_devices(self): """Load device data from network devices.""" - pass def load(self): """Load network data.""" diff --git a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py index 22587dfe..763f65d5 100644 --- a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py @@ -102,7 +102,7 @@ def load_devices(self): # for device in Device.objects.filter(primary_ip4__host__in=self.job.ip_addresses): for device in Device.objects.all(): - interface_list = list() + interface_list = [] # Only interfaces with the device's primeary ip should be considered for diff calculations for interface in device.interfaces.all(): if device.primary_ip4 in interface.ip_addresses.all(): @@ -165,8 +165,7 @@ def _validate_ip_addresses(self, ip_addresses): validation_successful = False if validation_successful: return True - else: - raise netaddr.AddrConversionError + raise netaddr.AddrConversionError def _handle_failed_connections(self, device_data): """ @@ -179,9 +178,9 @@ def 
_handle_failed_connections(self, device_data): for ip_address in device_data: if device_data[ip_address].get("failed"): - self.job.logger.error(f"Failed to connect to {ip_address}. This device will not be onboarded.") + self.job.logger.error(f"Connection or data error for {ip_address}. This device will not be onboarded.") if self.job.debug: - self.job.logger.debug(device_data[ip_address].get("subtask_result")) + self.job.logger.error(device_data[ip_address].get("subtask_result")) failed_ip_addresses.append(ip_address) for ip_address in failed_ip_addresses: del device_data[ip_address] @@ -195,7 +194,7 @@ def execute_command_getter(self): f"The selected platform, {self.job.platform} " "does not have a network driver, please update the Platform." ) - raise Exception("Platform.network_driver missing") + raise Exception("Platform.network_driver missing") # pylint: disable=broad-exception-raised command_getter_job = Job.objects.get(name="Command Getter for Device Onboarding") job_kwargs = self.job.prepare_job_kwargs(self.job.job_result.task_kwargs) diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py index 93c4b984..39de3686 100644 --- a/nautobot_device_onboarding/jobs.py +++ b/nautobot_device_onboarding/jobs.py @@ -39,7 +39,7 @@ LOGGER = logging.getLogger(__name__) -name = "Device Onboarding/Network Importer" +name = "Device Onboarding/Network Importer" # pylint: disable=invalid-name class OnboardingTask(Job): # pylint: disable=too-many-instance-attributes @@ -208,7 +208,7 @@ def _parse_credentials(self, credentials): self.secret = settings.NAPALM_ARGS.get("secret", None) -class SSOTDeviceOnboarding(DataSource): +class SSOTDeviceOnboarding(DataSource): # pylint: disable=too-many-instance-attributes """Job for syncing basic device info from a network into Nautobot.""" def __init__(self): @@ -216,7 +216,7 @@ def __init__(self): super().__init__() self.diffsync_flags = DiffSyncFlags.SKIP_UNMATCHED_DST - class Meta: + class Meta: # pylint: 
disable=too-few-public-methods """Metadata about this Job.""" name = "Sync Devices" @@ -312,7 +312,7 @@ def run( platform, *args, **kwargs, - ): # pylint:disable=arguments-differ + ): # pylint:disable=arguments-differ, too-many-arguments, too-many-locals """Run sync.""" self.dryrun = dryrun self.memory_profiling = memory_profiling @@ -350,7 +350,7 @@ def run( super().run(dryrun, memory_profiling, *args, **kwargs) -class SSOTNetworkImporter(DataSource): +class SSOTNetworkImporter(DataSource): # pylint: disable=too-many-instance-attributes """Job syncing extended device attributes into Nautobot.""" def __init__(self): @@ -358,7 +358,7 @@ def __init__(self): super().__init__() self.diffsync_flags = DiffSyncFlags.SKIP_UNMATCHED_DST - class Meta: + class Meta: # pylint: disable=too-few-public-methods """Metadata about this Job.""" name = "Sync Network Data" @@ -404,7 +404,7 @@ def load_target_adapter(self): def run( self, dryrun, memory_profiling, debug, location, devices, device_role, tag, *args, **kwargs - ): # pylint:disable=arguments-differ + ): # pylint:disable=arguments-differ, disable=too-many-arguments """Run sync.""" self.dryrun = dryrun self.memory_profiling = memory_profiling @@ -483,7 +483,7 @@ def run(self, *args, **kwargs): self.logger.info("%s;\n%s", host, data.dict()) # End # - except Exception as err: + except Exception as err: # pylint: disable=broad-exception-caught self.logger.info("Error: %s", err) return err return final_result From 3af9cc7469915d7c3408c0bb773fe729b4a1aa45 Mon Sep 17 00:00:00 2001 From: David Cates Date: Tue, 30 Jan 2024 09:30:34 -0700 Subject: [PATCH 029/225] flake8 and isort --- .../diffsync/adapters/network_importer_adapters.py | 8 ++++---- .../diffsync/adapters/onboarding_adapters.py | 4 ++-- .../diffsync/models/network_importer_models.py | 3 ++- .../diffsync/models/onboarding_models.py | 3 ++- nautobot_device_onboarding/jobs.py | 14 +++++++------- 5 files changed, 17 insertions(+), 15 deletions(-) diff --git 
a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py index 7dece7db..7fede25c 100644 --- a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py @@ -1,8 +1,8 @@ """DiffSync adapters.""" -import diffsync from nautobot_ssot.contrib import NautobotAdapter +import diffsync from nautobot_device_onboarding.diffsync.models import network_importer_models ####################################### @@ -42,13 +42,13 @@ class FilteredNautobotAdapter(NautobotAdapter): Must be used with FilteredNautobotModel. """ - def _load_objects(self, diffsync_model): # pylint: disable=protected-access + def _load_objects(self, diffsync_model): # pylint: disable=protected-access """Given a diffsync model class, load a list of models from the database and return them.""" parameter_names = self._get_parameter_names(diffsync_model) - for database_object in diffsync_model._get_queryset(diffsync=self): # pylint: disable=protected-access + for database_object in diffsync_model._get_queryset(diffsync=self): # pylint: disable=protected-access self.job.logger.debug( f"LOADING: Database Object: {database_object}, " - f"Model Name: {diffsync_model._modelname}, " # pylint: disable=protected-access + f"Model Name: {diffsync_model._modelname}, " # pylint: disable=protected-access f"Parameter Names: {parameter_names}" ) self._load_single_object(database_object, diffsync_model, parameter_names) diff --git a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py index 763f65d5..081e80dd 100644 --- a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py @@ -2,12 +2,12 @@ import time -import diffsync import netaddr from nautobot.apps.choices import 
JobResultStatusChoices from nautobot.dcim.models import Device, DeviceType, Manufacturer, Platform from nautobot.extras.models import Job, JobResult +import diffsync from nautobot_device_onboarding.diffsync.models import onboarding_models ####################################### @@ -194,7 +194,7 @@ def execute_command_getter(self): f"The selected platform, {self.job.platform} " "does not have a network driver, please update the Platform." ) - raise Exception("Platform.network_driver missing") # pylint: disable=broad-exception-raised + raise Exception("Platform.network_driver missing") # pylint: disable=broad-exception-raised command_getter_job = Job.objects.get(name="Command Getter for Device Onboarding") job_kwargs = self.job.prepare_job_kwargs(self.job.job_result.task_kwargs) diff --git a/nautobot_device_onboarding/diffsync/models/network_importer_models.py b/nautobot_device_onboarding/diffsync/models/network_importer_models.py index 6f4fdf35..0b66d33f 100644 --- a/nautobot_device_onboarding/diffsync/models/network_importer_models.py +++ b/nautobot_device_onboarding/diffsync/models/network_importer_models.py @@ -2,11 +2,12 @@ from typing import List -from diffsync import DiffSync from nautobot.dcim.models import Device, Interface from nautobot.ipam.models import IPAddress from nautobot_ssot.contrib import NautobotModel +from diffsync import DiffSync + class FilteredNautobotModel(NautobotModel): """ diff --git a/nautobot_device_onboarding/diffsync/models/onboarding_models.py b/nautobot_device_onboarding/diffsync/models/onboarding_models.py index 1f45ac83..879982e0 100644 --- a/nautobot_device_onboarding/diffsync/models/onboarding_models.py +++ b/nautobot_device_onboarding/diffsync/models/onboarding_models.py @@ -3,7 +3,6 @@ import ipaddress from typing import Optional -from diffsync import DiffSyncModel from django.core.exceptions import MultipleObjectsReturned, ObjectDoesNotExist, ValidationError from nautobot.apps.choices import InterfaceTypeChoices, 
PrefixTypeChoices from nautobot.dcim.models import Device, DeviceType, Interface, Manufacturer, Platform @@ -11,6 +10,8 @@ from nautobot.ipam.models import IPAddress, Prefix from nautobot_ssot.contrib import NautobotModel +from diffsync import DiffSyncModel + class OnboardingDevice(DiffSyncModel): """Diffsync model for device data.""" diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py index 39de3686..6fab9fdd 100644 --- a/nautobot_device_onboarding/jobs.py +++ b/nautobot_device_onboarding/jobs.py @@ -2,7 +2,6 @@ import logging -from diffsync.enum import DiffSyncFlags from django.conf import settings from nautobot.apps.jobs import BooleanVar, IntegerVar, Job, MultiObjectVar, ObjectVar, StringVar from nautobot.core.celery import register_jobs @@ -14,6 +13,7 @@ from nornir import InitNornir from nornir.core.plugins.inventory import InventoryPluginRegister +from diffsync.enum import DiffSyncFlags from nautobot_device_onboarding.diffsync.adapters.network_importer_adapters import ( NetworkImporterNautobotAdapter, NetworkImporterNetworkAdapter, @@ -39,7 +39,7 @@ LOGGER = logging.getLogger(__name__) -name = "Device Onboarding/Network Importer" # pylint: disable=invalid-name +name = "Device Onboarding/Network Importer" # pylint: disable=invalid-name class OnboardingTask(Job): # pylint: disable=too-many-instance-attributes @@ -208,7 +208,7 @@ def _parse_credentials(self, credentials): self.secret = settings.NAPALM_ARGS.get("secret", None) -class SSOTDeviceOnboarding(DataSource): # pylint: disable=too-many-instance-attributes +class SSOTDeviceOnboarding(DataSource): # pylint: disable=too-many-instance-attributes """Job for syncing basic device info from a network into Nautobot.""" def __init__(self): @@ -216,7 +216,7 @@ def __init__(self): super().__init__() self.diffsync_flags = DiffSyncFlags.SKIP_UNMATCHED_DST - class Meta: # pylint: disable=too-few-public-methods + class Meta: # pylint: disable=too-few-public-methods """Metadata about this 
Job.""" name = "Sync Devices" @@ -350,7 +350,7 @@ def run( super().run(dryrun, memory_profiling, *args, **kwargs) -class SSOTNetworkImporter(DataSource): # pylint: disable=too-many-instance-attributes +class SSOTNetworkImporter(DataSource): # pylint: disable=too-many-instance-attributes """Job syncing extended device attributes into Nautobot.""" def __init__(self): @@ -358,7 +358,7 @@ def __init__(self): super().__init__() self.diffsync_flags = DiffSyncFlags.SKIP_UNMATCHED_DST - class Meta: # pylint: disable=too-few-public-methods + class Meta: # pylint: disable=too-few-public-methods """Metadata about this Job.""" name = "Sync Network Data" @@ -483,7 +483,7 @@ def run(self, *args, **kwargs): self.logger.info("%s;\n%s", host, data.dict()) # End # - except Exception as err: # pylint: disable=broad-exception-caught + except Exception as err: # pylint: disable=broad-exception-caught self.logger.info("Error: %s", err) return err return final_result From dc1709d717510c63572025f31ece71608fca0656 Mon Sep 17 00:00:00 2001 From: Jeff Kala Date: Tue, 30 Jan 2024 16:35:14 -0600 Subject: [PATCH 030/225] fix autodetect failing when two ips provided --- nautobot_device_onboarding/jobs.py | 15 +++--- .../utils/inventory_creator.py | 52 +++++++++---------- 2 files changed, 33 insertions(+), 34 deletions(-) diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py index 6fab9fdd..286e9fe3 100644 --- a/nautobot_device_onboarding/jobs.py +++ b/nautobot_device_onboarding/jobs.py @@ -2,6 +2,7 @@ import logging +from diffsync.enum import DiffSyncFlags from django.conf import settings from nautobot.apps.jobs import BooleanVar, IntegerVar, Job, MultiObjectVar, ObjectVar, StringVar from nautobot.core.celery import register_jobs @@ -9,11 +10,6 @@ from nautobot.extras.choices import SecretsGroupAccessTypeChoices, SecretsGroupSecretTypeChoices from nautobot.extras.models import Role, SecretsGroup, SecretsGroupAssociation, Status, Tag from nautobot.ipam.models import 
Namespace -from nautobot_ssot.jobs.base import DataSource -from nornir import InitNornir -from nornir.core.plugins.inventory import InventoryPluginRegister - -from diffsync.enum import DiffSyncFlags from nautobot_device_onboarding.diffsync.adapters.network_importer_adapters import ( NetworkImporterNautobotAdapter, NetworkImporterNetworkAdapter, @@ -30,6 +26,9 @@ from nautobot_device_onboarding.nornir_plays.logger import NornirLogger from nautobot_device_onboarding.nornir_plays.processor import ProcessorDO from nautobot_device_onboarding.utils.inventory_creator import _set_inventory +from nautobot_ssot.jobs.base import DataSource +from nornir import InitNornir +from nornir.core.plugins.inventory import InventoryPluginRegister InventoryPluginRegister.register("empty-inventory", EmptyInventory) @@ -472,9 +471,9 @@ def run(self, *args, **kwargs): }, ) as nornir_obj: nr_with_processors = nornir_obj.with_processors([ProcessorDO(logger, compiled_results)]) - ip_address = self.ip_addresses - inventory_constructed = _set_inventory(ip_address, self.platform, self.port, self.secrets_group) - nr_with_processors.inventory.hosts.update(inventory_constructed) + for entered_ip in self.ip_addresses: + single_host_inventory_constructed = _set_inventory(entered_ip, self.platform, self.port, self.secrets_group) + nr_with_processors.inventory.hosts.update(single_host_inventory_constructed) nr_with_processors.run(task=netmiko_send_commands) final_result = self._process_result(compiled_results, self.ip_addresses) diff --git a/nautobot_device_onboarding/utils/inventory_creator.py b/nautobot_device_onboarding/utils/inventory_creator.py index c530fa70..43cd46ff 100755 --- a/nautobot_device_onboarding/utils/inventory_creator.py +++ b/nautobot_device_onboarding/utils/inventory_creator.py @@ -1,12 +1,12 @@ """Inventory Creator and Helpers.""" from django.conf import settings +from nautobot.dcim.models import Platform from nautobot.extras.choices import SecretsGroupAccessTypeChoices, 
SecretsGroupSecretTypeChoices +from nautobot_device_onboarding.exceptions import OnboardException from netmiko import SSHDetect from nornir.core.inventory import ConnectionOptions, Host -from nautobot_device_onboarding.exceptions import OnboardException - def _parse_credentials(credentials): """Parse and return dictionary of credentials.""" @@ -60,32 +60,32 @@ def guess_netmiko_device_type(hostname, username, password, port): return guessed_device_type -def _set_inventory(ips, platform, port, secrets_group): +def _set_inventory(host_ip, platform, port, secrets_group): """Construct Nornir Inventory.""" inv = {} username, password, secret = _parse_credentials(secrets_group) - for host_ip in ips: - if platform: - platform = platform.network_driver - else: - platform = guess_netmiko_device_type(host_ip, username, password, port) + if platform: + p = Platform.objects.get(name=platform) + platform = p.network_driver + else: + platform = guess_netmiko_device_type(host_ip, username, password, port) - host = Host( - name=host_ip, - hostname=host_ip, - port=port, - username=username, - password=password, - platform=platform, - connection_options={ - "netmiko": ConnectionOptions( - hostname=host_ip, - port=port, - username=username, - password=password, - platform=platform, - ) - }, - ) - inv.update({host_ip: host}) + host = Host( + name=host_ip, + hostname=host_ip, + port=int(port), + username=username, + password=password, + platform=platform, + connection_options={ + "netmiko": ConnectionOptions( + hostname=host_ip, + port=int(port), + username=username, + password=password, + platform=platform, + ) + }, + ) + inv.update({host_ip: host}) return inv From 4d61ab0bd43c392331323d0af7d117f4b8e79690 Mon Sep 17 00:00:00 2001 From: Jeff Kala Date: Tue, 30 Jan 2024 16:39:35 -0600 Subject: [PATCH 031/225] fix autodetect failing when two ips provided --- nautobot_device_onboarding/utils/inventory_creator.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git 
a/nautobot_device_onboarding/utils/inventory_creator.py b/nautobot_device_onboarding/utils/inventory_creator.py index 43cd46ff..73990995 100755 --- a/nautobot_device_onboarding/utils/inventory_creator.py +++ b/nautobot_device_onboarding/utils/inventory_creator.py @@ -65,8 +65,7 @@ def _set_inventory(host_ip, platform, port, secrets_group): inv = {} username, password, secret = _parse_credentials(secrets_group) if platform: - p = Platform.objects.get(name=platform) - platform = p.network_driver + platform = platform.network_driver else: platform = guess_netmiko_device_type(host_ip, username, password, port) From c393d92f86aa4679a4b97032c0be0f77c83fe10d Mon Sep 17 00:00:00 2001 From: bakebot Date: Wed, 31 Jan 2024 12:58:57 +0000 Subject: [PATCH 032/225] Cookie updated by NetworkToCode Cookie Drift Manager Tool Template: ``` { "template": "https://github.com/nautobot/cookiecutter-nautobot-app.git", "dir": "nautobot-app", "ref": "refs/tags/nautobot-app-v2.1.0", "path": null } ``` Cookie: ``` { "remote": "https://github.com/nautobot/nautobot-app-device-onboarding.git", "path": "/tmp/tmppwc0ifgu/nautobot-app-device-onboarding", "repository_path": "/tmp/tmppwc0ifgu/nautobot-app-device-onboarding", "dir": "", "branch_prefix": "drift-manager", "context": { "codeowner_github_usernames": "@mzbroch @scetron @glennmatthews @chadell", "full_name": "Network to Code, LLC", "email": "info@networktocode.com", "github_org": "nautobot", "app_name": "nautobot_device_onboarding", "verbose_name": "Device Onboarding", "app_slug": "nautobot-device-onboarding", "project_slug": "nautobot-app-device-onboarding", "repo_url": "https://github.com/nautobot/nautobot-app-device-onboarding", "base_url": "nautobot-device-onboarding", "min_nautobot_version": "2.0.3", "max_nautobot_version": "2.9999", "camel_name": "NautobotDeviceOnboarding", "project_short_description": "Device Onboarding", "model_class_name": "None", "open_source_license": "Apache-2.0", "docs_base_url": "https://docs.nautobot.com", 
"docs_app_url": "https://docs.nautobot.com/projects/device-onboarding/en/latest", "_template": "https://github.com/nautobot/cookiecutter-nautobot-app.git", "_output_dir": "/tmp/tmppwc0ifgu", "_repo_dir": "/github/home/.cookiecutters/cookiecutter-nautobot-app/nautobot-app", "_checkout": "refs/tags/nautobot-app-v2.1.0" }, "base_branch": "develop", "remote_name": "origin", "pull_request_strategy": "PullRequestStrategy.CREATE", "post_actions": [ "PostAction.BLACK" ], "baked_commit_ref": "094719b3173bd24329c250c54c586b699be5f0c1", "draft": true } ``` CLI Arguments: ``` { "cookie_dir": "", "input": false, "json_filename": "", "output_dir": "", "push": true, "template": "", "template_dir": "", "template_ref": "refs/tags/nautobot-app-v2.1.0", "pull_request": null, "post_action": [], "disable_post_actions": false, "draft": null } ``` --- .cookiecutter.json | 4 +- .github/CODEOWNERS | 2 +- .../pull_request_template.md | 2 +- .github/workflows/ci.yml | 63 +++++-- .github/workflows/rebake.yml | 118 ------------- .github/workflows/upstream_testing.yml | 2 +- LICENSE | 2 +- README.md | 14 +- changes/.gitignore | 1 + development/Dockerfile | 4 +- development/docker-compose.base.yml | 6 +- development/docker-compose.dev.yml | 12 ++ development/nautobot_config.py | 3 - development/towncrier_template.j2 | 30 ++++ docs/admin/compatibility_matrix.md | 11 +- docs/admin/install.md | 48 +----- docs/admin/release_notes/version_1.0.md | 25 +++ docs/admin/uninstall.md | 10 +- docs/admin/upgrade.md | 5 +- docs/assets/extra.css | 5 + docs/dev/arch_decision.md | 7 + docs/dev/contributing.md | 33 +++- docs/dev/dev_environment.md | 24 +-- docs/user/app_overview.md | 3 +- nautobot_device_onboarding/__init__.py | 4 +- .../migrations/__init__.py | 0 .../tests/test_basic.py | 12 -- pyproject.toml | 121 +++++++++++--- tasks.py | 158 +++++++++++++++--- 29 files changed, 458 insertions(+), 271 deletions(-) delete mode 100644 .github/workflows/rebake.yml create mode 100644 changes/.gitignore create mode 
100644 development/towncrier_template.j2 create mode 100644 docs/dev/arch_decision.md delete mode 100644 nautobot_device_onboarding/migrations/__init__.py diff --git a/.cookiecutter.json b/.cookiecutter.json index c41fcdfa..a8b2ca93 100644 --- a/.cookiecutter.json +++ b/.cookiecutter.json @@ -21,7 +21,7 @@ "_drift_manager": { "template": "https://github.com/nautobot/cookiecutter-nautobot-app.git", "template_dir": "nautobot-app", - "template_ref": "develop", + "template_ref": "refs/tags/nautobot-app-v2.1.0", "cookie_dir": "", "branch_prefix": "drift-manager", "pull_request_strategy": "create", @@ -29,7 +29,7 @@ "black" ], "draft": true, - "baked_commit_ref": "094719b3173bd24329c250c54c586b699be5f0c1" + "baked_commit_ref": "0af4c3b9796ef8fcd806cf023936daaf7e978e5d" } } } \ No newline at end of file diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index f2369400..2fe7aeb8 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -1,2 +1,2 @@ -# Default owners for all files in this repository +# Default owner(s) of all files in this repository * @mzbroch @scetron @glennmatthews @chadell diff --git a/.github/PULL_REQUEST_TEMPLATE/pull_request_template.md b/.github/PULL_REQUEST_TEMPLATE/pull_request_template.md index b1eba070..162c9902 100644 --- a/.github/PULL_REQUEST_TEMPLATE/pull_request_template.md +++ b/.github/PULL_REQUEST_TEMPLATE/pull_request_template.md @@ -1,5 +1,5 @@ " +issue_format = "[#{issue}](https://github.com/nautobot/nautobot-app-device-onboarding/issues/{issue})" + +[[tool.towncrier.type]] +directory = "security" +name = "Security" +showcontent = true + +[[tool.towncrier.type]] +directory = "added" +name = "Added" +showcontent = true + +[[tool.towncrier.type]] +directory = "changed" +name = "Changed" +showcontent = true + +[[tool.towncrier.type]] +directory = "deprecated" +name = "Deprecated" +showcontent = true + +[[tool.towncrier.type]] +directory = "removed" +name = "Removed" +showcontent = true + +[[tool.towncrier.type]] +directory = "fixed" 
+name = "Fixed" +showcontent = true + +[[tool.towncrier.type]] +directory = "dependencies" +name = "Dependencies" +showcontent = true + +[[tool.towncrier.type]] +directory = "documentation" +name = "Documentation" +showcontent = true + +[[tool.towncrier.type]] +directory = "housekeeping" +name = "Housekeeping" +showcontent = true diff --git a/tasks.py b/tasks.py index 631063d5..192aa939 100644 --- a/tasks.py +++ b/tasks.py @@ -13,6 +13,8 @@ """ import os +from pathlib import Path +from time import sleep from invoke.collection import Collection from invoke.tasks import task as invoke_task @@ -21,7 +23,8 @@ def is_truthy(arg): """Convert "truthy" strings into Booleans. - Examples: + Examples + -------- >>> is_truthy('yes') True Args: @@ -67,6 +70,25 @@ def _is_compose_included(context, name): return f"docker-compose.{name}.yml" in context.nautobot_device_onboarding.compose_files +def _await_healthy_service(context, service): + container_id = docker_compose(context, f"ps -q -- {service}", pty=False, echo=False, hide=True).stdout.strip() + _await_healthy_container(context, container_id) + + +def _await_healthy_container(context, container_id): + while True: + result = context.run( + "docker inspect --format='{{.State.Health.Status}}' " + container_id, + pty=False, + echo=False, + hide=True, + ) + if result.stdout.strip() == "healthy": + break + print(f"Waiting for `{container_id}` container to become healthy ...") + sleep(1) + + def task(function=None, *args, **kwargs): """Task decorator to override the default Invoke task decorator and add each task to the invoke namespace.""" @@ -90,6 +112,7 @@ def docker_compose(context, command, **kwargs): """Helper function for running a specific docker compose command with all appropriate parameters and environment. Args: + ---- context (obj): Used to run specific commands command (str): Command string to append to the "docker compose ..." command, such as "build", "up", etc. **kwargs: Passed through to the context.run() call. 
@@ -216,11 +239,46 @@ def stop(context, service=""): docker_compose(context, "stop" if service else "down --remove-orphans", service=service) -@task -def destroy(context): +@task( + aliases=("down",), + help={ + "volumes": "Remove Docker compose volumes (default: True)", + "import-db-file": "Import database from `import-db-file` file into the fresh environment (default: empty)", + }, +) +def destroy(context, volumes=True, import_db_file=""): """Destroy all containers and volumes.""" print("Destroying Nautobot...") - docker_compose(context, "down --remove-orphans --volumes") + docker_compose(context, f"down --remove-orphans {'--volumes' if volumes else ''}") + + if not import_db_file: + return + + if not volumes: + raise ValueError("Cannot specify `--no-volumes` and `--import-db-file` arguments at the same time.") + + print(f"Importing database file: {import_db_file}...") + + input_path = Path(import_db_file).absolute() + if not input_path.is_file(): + raise ValueError(f"File not found: {input_path}") + + command = [ + "run", + "--rm", + "--detach", + f"--volume='{input_path}:/docker-entrypoint-initdb.d/dump.sql'", + "--", + "db", + ] + + container_id = docker_compose(context, " ".join(command), pty=False, echo=False, hide=True).stdout.strip() + _await_healthy_container(context, container_id) + print("Stopping database container...") + context.run(f"docker stop {container_id}", pty=False, echo=False, hide=True) + + print("Database import complete, you can start Nautobot with the following command:") + print("invoke start") @task @@ -424,27 +482,43 @@ def dbshell(context, db_name="", input_file="", output_file="", query=""): @task( help={ + "db-name": "Database name to create (default: Nautobot database)", "input-file": "SQL dump file to replace the existing database with. 
This can be generated using `invoke backup-db` (default: `dump.sql`).", } ) -def import_db(context, input_file="dump.sql"): - """Stop Nautobot containers and replace the current database with the dump into the running `db` container.""" - docker_compose(context, "stop -- nautobot worker") +def import_db(context, db_name="", input_file="dump.sql"): + """Stop Nautobot containers and replace the current database with the dump into `db` container.""" + docker_compose(context, "stop -- nautobot worker beat") + start(context, "db") + _await_healthy_service(context, "db") command = ["exec -- db sh -c '"] if _is_compose_included(context, "mysql"): + if not db_name: + db_name = "$MYSQL_DATABASE" command += [ + "mysql --user root --password=$MYSQL_ROOT_PASSWORD", + '--execute="', + f"DROP DATABASE IF EXISTS {db_name};", + f"CREATE DATABASE {db_name};", + "" + if db_name == "$MYSQL_DATABASE" + else f"GRANT ALL PRIVILEGES ON {db_name}.* TO $MYSQL_USER; FLUSH PRIVILEGES;", + '"', + "&&", "mysql", - "--database=$MYSQL_DATABASE", + f"--database={db_name}", "--user=$MYSQL_USER", "--password=$MYSQL_PASSWORD", ] elif _is_compose_included(context, "postgres"): + if not db_name: + db_name = "$POSTGRES_DB" command += [ - "psql", - "--username=$POSTGRES_USER", - "postgres", + f"dropdb --if-exists --user=$POSTGRES_USER {db_name} &&", + f"createdb --user=$POSTGRES_USER {db_name} &&", + f"psql --user=$POSTGRES_USER --dbname={db_name}", ] else: raise ValueError("Unsupported database backend.") @@ -467,7 +541,10 @@ def import_db(context, input_file="dump.sql"): } ) def backup_db(context, db_name="", output_file="dump.sql", readable=True): - """Dump database into `output_file` file from running `db` container.""" + """Dump database into `output_file` file from `db` container.""" + start(context, "db") + _await_healthy_service(context, "db") + command = ["exec -- db sh -c '"] if _is_compose_included(context, "mysql"): @@ -475,17 +552,12 @@ def backup_db(context, db_name="", 
output_file="dump.sql", readable=True): "mysqldump", "--user=root", "--password=$MYSQL_ROOT_PASSWORD", - "--add-drop-database", "--skip-extended-insert" if readable else "", - "--databases", db_name if db_name else "$MYSQL_DATABASE", ] elif _is_compose_included(context, "postgres"): command += [ "pg_dump", - "--clean", - "--create", - "--if-exists", "--username=$POSTGRES_USER", f"--dbname={db_name or '$POSTGRES_DB'}", "--inserts" if readable else "", @@ -542,6 +614,19 @@ def help_task(context): context.run(f"invoke {task_name} --help") +@task( + help={ + "version": "Version of Device Onboarding to generate the release notes for.", + } +) +def generate_release_notes(context, version=""): + """Generate Release Notes using Towncrier.""" + command = "env DJANGO_SETTINGS_MODULE=nautobot.core.settings towncrier build" + if version: + command += f" --version {version}" + run_command(context, command) + + # ------------------------------------------------------------------------------ # TESTS # ------------------------------------------------------------------------------ @@ -585,12 +670,34 @@ def pylint(context): run_command(context, command) -@task -def pydocstyle(context): - """Run pydocstyle to validate docstring formatting adheres to NTC defined standards.""" - # We exclude the /migrations/ directory since it is autogenerated code - command = "pydocstyle ." - run_command(context, command) +@task(aliases=("a",)) +def autoformat(context): + """Run code autoformatting.""" + black(context, autoformat=True) + ruff(context, action="both", fix=True) + + +@task( + help={ + "action": "One of 'lint', 'format', or 'both'", + "fix": "Automatically fix selected action. 
May not be able to fix all.", + "output_format": "see https://docs.astral.sh/ruff/settings/#output-format", + }, +) +def ruff(context, action="lint", fix=False, output_format="text"): + """Run ruff to perform code formatting and/or linting.""" + if action != "lint": + command = "ruff format" + if not fix: + command += " --check" + command += " ." + run_command(context, command) + if action != "format": + command = "ruff check" + if fix: + command += " --fix" + command += f" --output-format {output_format} ." + run_command(context, command) @task @@ -605,6 +712,7 @@ def yamllint(context): """Run yamllint to validate formatting adheres to NTC defined YAML standards. Args: + ---- context (obj): Used to run specific commands """ command = "yamllint . --format standard" @@ -679,12 +787,12 @@ def tests(context, failfast=False, keepdb=False, lint_only=False): # Sorted loosely from fastest to slowest print("Running black...") black(context) + print("Running ruff...") + ruff(context) print("Running flake8...") flake8(context) print("Running bandit...") bandit(context) - print("Running pydocstyle...") - pydocstyle(context) print("Running yamllint...") yamllint(context) print("Running poetry check...") From 03eaa75382631230406da61ecbd97fed0db8a05c Mon Sep 17 00:00:00 2001 From: Jan Snasel Date: Wed, 31 Jan 2024 13:20:15 +0000 Subject: [PATCH 033/225] chore: Manual fixes --- .github/workflows/ci.yml | 6 +-- README.md | 12 ++--- docs/admin/compatibility_matrix.md | 11 ++-- docs/admin/install.md | 50 ++++++++++++++++--- docs/admin/release_notes/version_1.0.md | 25 ---------- docs/admin/upgrade.md | 3 -- docs/dev/arch_decision.md | 7 --- docs/user/app_overview.md | 3 +- .../migrations/__init__.py | 0 nautobot_device_onboarding/nautobot_keeper.py | 3 +- pyproject.toml | 3 +- tasks.py | 5 +- 12 files changed, 60 insertions(+), 68 deletions(-) delete mode 100644 docs/dev/arch_decision.md create mode 100644 nautobot_device_onboarding/migrations/__init__.py diff --git 
a/.github/workflows/ci.yml b/.github/workflows/ci.yml index bd9e8fd4..b5de0f34 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -193,9 +193,9 @@ jobs: - python-version: "3.11" db-backend: "postgresql" nautobot-version: "2.0.3" - - python-version: "3.11" - db-backend: "mysql" - nautobot-version: "stable" + # - python-version: "3.11" + # db-backend: "mysql" + # nautobot-version: "stable" runs-on: "ubuntu-22.04" env: INVOKE_NAUTOBOT_DEVICE_ONBOARDING_PYTHON_VER: "${{ matrix.python-version }}" diff --git a/README.md b/README.md index 89cc5b8a..cc2b1beb 100644 --- a/README.md +++ b/README.md @@ -1,10 +1,10 @@ # Nautobot Device Onboarding

- +
- - + +
@@ -21,11 +21,7 @@ Regardless, the Onboarding App greatly simplifies the onboarding process by allo Device Onboarding is a Job that allows you to provide a few required pieces of information and onboard the device. -> Developer Note: Place the files in the `docs/images/` folder and link them using only full URLs from GitHub, for example: `![Overview](https://raw.githubusercontent.com/nautobot/nautobot-app-device-onboarding/develop/docs/images/app-overview.png)`. This absolute static linking is required to ensure the README renders properly in GitHub, the docs site, and any other external sites like PyPI. - -More screenshots can be found in the [Using the App](https://docs.nautobot.com/projects/device-onboarding/en/latest/user/app_use_cases/) page in the documentation. Here's a quick overview of some of the app's added functionality: - -![](https://raw.githubusercontent.com/nautobot/nautobot-app-device-onboarding/develop/docs/images/placeholder.png) +![job input](https://raw.githubusercontent.com/nautobot/nautobot-app-device-onboarding/develop/docs/images/do_job_inputs.png) ## Try it out! diff --git a/docs/admin/compatibility_matrix.md b/docs/admin/compatibility_matrix.md index fce8a9e0..d7cf5d30 100644 --- a/docs/admin/compatibility_matrix.md +++ b/docs/admin/compatibility_matrix.md @@ -1,8 +1,9 @@ # Compatibility Matrix -!!! warning "Developer Note - Remove Me!" - Explain how the release models of the app and of Nautobot work together, how releases are supported, how features and older releases are deprecated etc. 
- | Device Onboarding Version | Nautobot First Support Version | Nautobot Last Support Version | -| ------------- | -------------------- | ------------- | -| 1.0.X | 2.0.3 | 2.99.99 | +| ------------------------- | ------------------------------ | ----------------------------- | +| 1.0.1 | 1.0.0 | 1.1.99 | +| 1.1.2 | 1.2.0 | 1.99 | +| 1.2.0 | 1.4.0 | 1.99 | +| 2.0.0 | 2.0.0 | 2.99 | +| 3.0.0 | 2.0.3 | 2.99 | diff --git a/docs/admin/install.md b/docs/admin/install.md index cd048852..711a5934 100644 --- a/docs/admin/install.md +++ b/docs/admin/install.md @@ -76,12 +76,46 @@ sudo systemctl restart nautobot nautobot-worker nautobot-scheduler ## App Configuration -Although the plugin can run without providing any settings, the plugin behavior can be controlled with the following list of settings defined in `nautobot_config.py`: +Although the app can run without providing any settings, the app behavior can be controlled with the following list of settings defined in `nautobot_config.py`: + +- `create_platform_if_missing` boolean (default True). If True, a new platform object will be created if the platform discovered by netmiko do not already exist and is in the list of supported platforms (`cisco_ios`, `cisco_nxos`, `arista_eos`, `juniper_junos`, `cisco_xr`) +- `create_device_type_if_missing` boolean (default True), If True, a new device type object will be created if the model discovered by Napalm do not match an existing device type. +- `create_manufacturer_if_missing` boolean (default True), If True, a new manufacturer object will be created if the manufacturer discovered by Napalm is do not match an existing manufacturer, this option is only valid if `create_device_type_if_missing` is True as well. +- `create_device_role_if_missing` boolean (default True), If True, a new device role object will be created if the device role provided was not provided as part of the onboarding and if the `default_device_role` do not already exist. 
+- `create_management_interface_if_missing` boolean (default True), If True, add management interface and IP address to the device. If False no management interfaces will be created, nor will the IP address be added to Nautobot, while the device will still get added. +- `default_device_status` string (default "Active"), status assigned to a new device by default. +- `default_ip_status` string (default "Active"), status assigned to a new device management IP. +- `default_device_role` string (default "network") +- `default_device_role_color` string (default FF0000), color assigned to the device role if it needs to be created. +- `default_management_interface` string (default "PLACEHOLDER"), name of the management interface that will be created, if one can't be identified on the device. +- `default_management_prefix_length` integer ( default 0), length of the prefix that will be used for the management IP address, if the IP can't be found. +- `skip_device_type_on_update` boolean (default False), If True, an existing Nautobot device will not get its device type updated. If False, device type will be updated with one discovered on a device. +- `skip_manufacturer_on_update` boolean (default False), If True, an existing Nautobot device will not get its manufacturer updated. If False, manufacturer will be updated with one discovered on a device. +- `assign_secrets_group` boolean (default False), If True, the credentials used to connect to the device will be assigned as the secrets group for the device upon creation. If False, no secrets group will be assigned. +- `set_management_only_interface` boolean (default False), If True, the interface that is created or updated will be set to management only. If False, the interface will be set to not be management only. +- `platform_map` (dictionary), mapping of an **auto-detected** Netmiko platform to the **Nautobot slug** name of your Platform. 
The dictionary should be in the format: + ```python + { + : + } + ``` +- `onboarding_extensions_map` (dictionary), mapping of a NAPALM driver name to the loadable Python module used as an onboarding extension. The dictionary should be in the format: + ```python + { + : + } + ``` +- `object_match_strategy` (string), defines the method for searching models. There are currently two strategies, strict and loose. Strict has to be a direct match, normally using a slug. Loose allows a range of search criteria to match a single object. If multiple objects are returned an error is raised. + +Modify `nautobot_config.py` with settings of your choice. Example settings are shown below: -The app behavior can be controlled with the following list of settings: - -| Key | Example | Default | Description | -| ------- | ------ | -------- | ------------------------------------- | -| `enable_backup` | `True` | `True` | A boolean to represent whether or not to run backup configurations within the app. | -| `platform_slug_map` | `{"cisco_wlc": "cisco_aireos"}` | `None` | A dictionary in which the key is the platform slug and the value is what netutils uses in any "network_os" parameter. | -| `per_feature_bar_width` | `0.15` | `0.15` | The width of the table bar within the overview report | +```python +# Example settings In your nautobot_config.py +PLUGINS_CONFIG = { + "nautobot_device_onboarding": { + "default_ip_status": "Active", + "default_device_role": "leaf", + "skip_device_type_on_update": True, + } +} +``` diff --git a/docs/admin/release_notes/version_1.0.md b/docs/admin/release_notes/version_1.0.md index aa6fad24..5daceedb 100644 --- a/docs/admin/release_notes/version_1.0.md +++ b/docs/admin/release_notes/version_1.0.md @@ -10,28 +10,3 @@ ### Additional Changes -This document describes all new features and changes in the release `1.0`. 
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). - -## Release Overview - -- Major features or milestones -- Achieved in this `x.y` release -- Changes to compatibility with Nautobot and/or other apps, libraries etc. - -## [v1.0.1] - 2021-09-08 - -### Added - -### Changed - -### Fixed - -- [#123](https://github.com/nautobot/nautobot-app-device-onboarding/issues/123) Fixed Tag filtering not working in job launch form - -## [v1.0.0] - 2021-08-03 - -### Added - -### Changed - -### Fixed diff --git a/docs/admin/upgrade.md b/docs/admin/upgrade.md index 8fcbf03a..5bec1594 100644 --- a/docs/admin/upgrade.md +++ b/docs/admin/upgrade.md @@ -4,7 +4,4 @@ Here you will find any steps necessary to upgrade the App in your Nautobot envir ## Upgrade Guide -!!! warning "Developer Note - Remove Me!" - Add more detailed steps on how the app is upgraded in an existing Nautobot setup and any version specifics (such as upgrading between major versions with breaking changes). - When a new release comes out it may be necessary to run a migration of the database to account for any changes in the data models used by this app. Execute the command `nautobot-server post-upgrade` within the runtime environment of your Nautobot installation after updating the `nautobot-device-onboarding` package via `pip`. diff --git a/docs/dev/arch_decision.md b/docs/dev/arch_decision.md deleted file mode 100644 index 6a68035d..00000000 --- a/docs/dev/arch_decision.md +++ /dev/null @@ -1,7 +0,0 @@ -# Architecture Decision Records - -The intention is to document deviations from a standard Model View Controller (MVC) design. - -!!! warning "Developer Note - Remove Me!" - Optional page, remove if not applicable. - For examples see [Golden Config](https://github.com/nautobot/nautobot-plugin-golden-config/tree/develop/docs/dev/dev_adr.md). 
diff --git a/docs/user/app_overview.md b/docs/user/app_overview.md index 6c1bb60d..255849ac 100644 --- a/docs/user/app_overview.md +++ b/docs/user/app_overview.md @@ -50,8 +50,7 @@ Existing Nautobot users may want to incorporate the Onboarding App as part of on @mzb and @dgarros -!!! warning "Developer Note - Remove Me!" - What is shown today in the Installed Apps page in Nautobot. What parts of Nautobot does it interact with, what does it add etc. ? +### Maintainers - @mzb - @glennmatthews diff --git a/nautobot_device_onboarding/migrations/__init__.py b/nautobot_device_onboarding/migrations/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/nautobot_device_onboarding/nautobot_keeper.py b/nautobot_device_onboarding/nautobot_keeper.py index 079a5ed7..062bb52f 100644 --- a/nautobot_device_onboarding/nautobot_keeper.py +++ b/nautobot_device_onboarding/nautobot_keeper.py @@ -297,8 +297,7 @@ def ensure_device_platform(self, create_platform_if_missing=PLUGIN_SETTINGS["cre """Get platform object from Nautobot filtered by platform_slug. Args: - platform_slug (string): slug of a platform object present in Nautobot, object will be created if not present - and create_platform_if_missing is enabled + create_platform_if_missing (bool): Flag to indicate if we need to create the platform, if not already present Return: nautobot.dcim.models.Platform object diff --git a/pyproject.toml b/pyproject.toml index acc591ca..895c166e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -27,7 +27,8 @@ include = [ [tool.poetry.dependencies] python = ">=3.8,<3.12" -# Used for local development +napalm = ">=2.5.0, <5" +zipp = "^3.4.0" nautobot = "^2.0.3" [tool.poetry.group.dev.dependencies] diff --git a/tasks.py b/tasks.py index 192aa939..150a5745 100644 --- a/tasks.py +++ b/tasks.py @@ -23,8 +23,7 @@ def is_truthy(arg): """Convert "truthy" strings into Booleans. 
- Examples - -------- + Examples: >>> is_truthy('yes') True Args: @@ -112,7 +111,6 @@ def docker_compose(context, command, **kwargs): """Helper function for running a specific docker compose command with all appropriate parameters and environment. Args: - ---- context (obj): Used to run specific commands command (str): Command string to append to the "docker compose ..." command, such as "build", "up", etc. **kwargs: Passed through to the context.run() call. @@ -712,7 +710,6 @@ def yamllint(context): """Run yamllint to validate formatting adheres to NTC defined YAML standards. Args: - ---- context (obj): Used to run specific commands """ command = "yamllint . --format standard" From 63cad44e6a860bee18ae5133e9a1e2030ae1d00d Mon Sep 17 00:00:00 2001 From: Jan Snasel Date: Wed, 31 Jan 2024 13:20:22 +0000 Subject: [PATCH 034/225] chore: Poetry lock --- poetry.lock | 521 ++++++++++++++++++++++++++++++++-------------------- 1 file changed, 322 insertions(+), 199 deletions(-) diff --git a/poetry.lock b/poetry.lock index f91e0c4e..6d618e39 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,10 +1,9 @@ -# This file is automatically @generated by Poetry 1.4.0 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. [[package]] name = "amqp" version = "5.2.0" description = "Low-level AMQP client for Python (fork of amqplib)." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -19,7 +18,6 @@ vine = ">=5.0.0,<6.0.0" name = "aniso8601" version = "7.0.0" description = "A library for parsing ISO 8601 strings." 
-category = "main" optional = false python-versions = "*" files = [ @@ -27,11 +25,21 @@ files = [ {file = "aniso8601-7.0.0.tar.gz", hash = "sha256:513d2b6637b7853806ae79ffaca6f3e8754bdd547048f5ccc1420aec4b714f1e"}, ] +[[package]] +name = "appnope" +version = "0.1.3" +description = "Disable App Nap on macOS >= 10.9" +optional = false +python-versions = "*" +files = [ + {file = "appnope-0.1.3-py2.py3-none-any.whl", hash = "sha256:265a455292d0bd8a72453494fa24df5a11eb18373a60c7c0430889f22548605e"}, + {file = "appnope-0.1.3.tar.gz", hash = "sha256:02bd91c4de869fbb1e1c50aafc4098827a7a54ab2f39d9dcba6c9547ed920e24"}, +] + [[package]] name = "asgiref" version = "3.7.2" description = "ASGI specs, helper code, and adapters" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -49,7 +57,6 @@ tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"] name = "astroid" version = "2.15.8" description = "An abstract syntax tree for Python with inference support." -category = "dev" optional = false python-versions = ">=3.7.2" files = [ @@ -65,11 +72,28 @@ wrapt = [ {version = ">=1.14,<2", markers = "python_version >= \"3.11\""}, ] +[[package]] +name = "asttokens" +version = "2.4.1" +description = "Annotate AST trees with source code positions" +optional = false +python-versions = "*" +files = [ + {file = "asttokens-2.4.1-py2.py3-none-any.whl", hash = "sha256:051ed49c3dcae8913ea7cd08e46a606dba30b79993209636c4875bc1d637bc24"}, + {file = "asttokens-2.4.1.tar.gz", hash = "sha256:b03869718ba9a6eb027e134bfdf69f38a236d681c83c160d510768af11254ba0"}, +] + +[package.dependencies] +six = ">=1.12.0" + +[package.extras] +astroid = ["astroid (>=1,<2)", "astroid (>=2,<4)"] +test = ["astroid (>=1,<2)", "astroid (>=2,<4)", "pytest"] + [[package]] name = "async-timeout" version = "4.0.3" description = "Timeout context manager for asyncio programs" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -81,7 +105,6 @@ files = [ name = "attrs" version = "23.1.0" 
description = "Classes Without Boilerplate" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -96,11 +119,21 @@ docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib- tests = ["attrs[tests-no-zope]", "zope-interface"] tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +[[package]] +name = "backcall" +version = "0.2.0" +description = "Specifications for callback functions passed in to an API" +optional = false +python-versions = "*" +files = [ + {file = "backcall-0.2.0-py2.py3-none-any.whl", hash = "sha256:fbbce6a29f263178a1f7915c1940bde0ec2b2a967566fe1c65c1dfb7422bd255"}, + {file = "backcall-0.2.0.tar.gz", hash = "sha256:5cbdbf27be5e7cfadb448baf0aa95508f91f2bbc6c6437cd9cd06e2a4c215e1e"}, +] + [[package]] name = "backports-zoneinfo" version = "0.2.1" description = "Backport of the standard library zoneinfo module" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -132,7 +165,6 @@ tzdata = ["tzdata"] name = "bandit" version = "1.7.5" description = "Security oriented static analyser for python code." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -156,7 +188,6 @@ yaml = ["PyYAML"] name = "bcrypt" version = "4.0.1" description = "Modern password hashing for your software and your servers" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -191,7 +222,6 @@ typecheck = ["mypy"] name = "billiard" version = "4.2.0" description = "Python multiprocessing fork with improvements and bugfixes" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -203,7 +233,6 @@ files = [ name = "black" version = "23.11.0" description = "The uncompromising code formatter." 
-category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -246,7 +275,6 @@ uvloop = ["uvloop (>=0.15.2)"] name = "celery" version = "5.3.5" description = "Distributed Task Queue." -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -303,7 +331,6 @@ zstd = ["zstandard (==0.22.0)"] name = "certifi" version = "2023.11.17" description = "Python package for providing Mozilla's CA Bundle." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -315,7 +342,6 @@ files = [ name = "cffi" version = "1.16.0" description = "Foreign Function Interface for Python calling C code." -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -380,7 +406,6 @@ pycparser = "*" name = "charset-normalizer" version = "3.3.2" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." -category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -480,7 +505,6 @@ files = [ name = "click" version = "8.1.7" description = "Composable command line interface toolkit" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -491,11 +515,27 @@ files = [ [package.dependencies] colorama = {version = "*", markers = "platform_system == \"Windows\""} +[[package]] +name = "click-default-group" +version = "1.2.4" +description = "click_default_group" +optional = false +python-versions = ">=2.7" +files = [ + {file = "click_default_group-1.2.4-py2.py3-none-any.whl", hash = "sha256:9b60486923720e7fc61731bdb32b617039aba820e22e1c88766b1125592eaa5f"}, + {file = "click_default_group-1.2.4.tar.gz", hash = "sha256:eb3f3c99ec0d456ca6cd2a7f08f7d4e91771bef51b01bdd9580cc6450fe1251e"}, +] + +[package.dependencies] +click = "*" + +[package.extras] +test = ["pytest"] + [[package]] name = "click-didyoumean" version = "0.3.0" description = "Enables git-like *did-you-mean* feature in click" -category = "main" optional = false python-versions = ">=3.6.2,<4.0.0" 
files = [ @@ -510,7 +550,6 @@ click = ">=7" name = "click-plugins" version = "1.1.1" description = "An extension module for click to enable registering CLI commands via setuptools entry-points." -category = "main" optional = false python-versions = "*" files = [ @@ -528,7 +567,6 @@ dev = ["coveralls", "pytest (>=3.6)", "pytest-cov", "wheel"] name = "click-repl" version = "0.3.0" description = "REPL plugin for Click" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -547,7 +585,6 @@ testing = ["pytest (>=7.2.1)", "pytest-cov (>=4.0.0)", "tox (>=4.4.3)"] name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ @@ -559,7 +596,6 @@ files = [ name = "coverage" version = "7.3.2" description = "Code coverage measurement for Python" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -624,7 +660,6 @@ toml = ["tomli"] name = "cron-descriptor" version = "1.4.0" description = "A Python library that converts cron expressions into human readable strings." -category = "main" optional = false python-versions = "*" files = [ @@ -638,7 +673,6 @@ dev = ["polib"] name = "cryptography" version = "41.0.5" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
-category = "main" optional = false python-versions = ">=3.7" files = [ @@ -680,11 +714,21 @@ ssh = ["bcrypt (>=3.1.5)"] test = ["pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] test-randomorder = ["pytest-randomly"] +[[package]] +name = "decorator" +version = "5.1.1" +description = "Decorators for Humans" +optional = false +python-versions = ">=3.5" +files = [ + {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, + {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, +] + [[package]] name = "defusedxml" version = "0.7.1" description = "XML bomb protection for Python stdlib modules" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -696,7 +740,6 @@ files = [ name = "dill" version = "0.3.7" description = "serialize all of Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -711,7 +754,6 @@ graph = ["objgraph (>=1.7.2)"] name = "django" version = "3.2.23" description = "A high-level Python Web framework that encourages rapid development and clean, pragmatic design." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -732,7 +774,6 @@ bcrypt = ["bcrypt"] name = "django-ajax-tables" version = "1.1.1" description = "Django tag for ajax-enabled tables" -category = "main" optional = false python-versions = "*" files = [ @@ -744,7 +785,6 @@ files = [ name = "django-celery-beat" version = "2.5.0" description = "Database-backed Periodic Tasks." -category = "main" optional = false python-versions = "*" files = [ @@ -765,7 +805,6 @@ tzdata = "*" name = "django-celery-results" version = "2.4.0" description = "Celery result backends for Django." 
-category = "main" optional = false python-versions = "*" files = [ @@ -780,7 +819,6 @@ celery = ">=5.2.3,<6.0" name = "django-constance" version = "2.9.1" description = "Django live settings with pluggable backends, including Redis." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -799,7 +837,6 @@ redis = ["redis"] name = "django-cors-headers" version = "4.2.0" description = "django-cors-headers is a Django application for handling the server headers required for Cross-Origin Resource Sharing (CORS)." -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -814,7 +851,6 @@ Django = ">=3.2" name = "django-db-file-storage" version = "0.5.5" description = "Custom FILE_STORAGE for Django. Saves files in your database instead of your file system." -category = "main" optional = false python-versions = "*" files = [ @@ -828,7 +864,6 @@ Django = "*" name = "django-debug-toolbar" version = "4.2.0" description = "A configurable set of panels that display various debug information about the current request/response." -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -844,7 +879,6 @@ sqlparse = ">=0.2" name = "django-extensions" version = "3.2.3" description = "Extensions for Django" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -859,7 +893,6 @@ Django = ">=3.2" name = "django-filter" version = "23.1" description = "Django-filter is a reusable Django application for allowing users to filter querysets dynamically." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -874,7 +907,6 @@ Django = ">=3.2" name = "django-health-check" version = "3.17.0" description = "Run checks on services like databases, queue servers, celery processes, etc." 
-category = "main" optional = false python-versions = ">=3.8" files = [ @@ -893,7 +925,6 @@ test = ["celery", "pytest", "pytest-cov", "pytest-django", "redis"] name = "django-jinja" version = "2.10.2" description = "Jinja2 templating language integrated in Django." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -909,7 +940,6 @@ jinja2 = ">=3" name = "django-picklefield" version = "3.1" description = "Pickled object field for Django" -category = "main" optional = false python-versions = ">=3" files = [ @@ -927,7 +957,6 @@ tests = ["tox"] name = "django-prometheus" version = "2.3.1" description = "Django middlewares to monitor your application with Prometheus.io." -category = "main" optional = false python-versions = "*" files = [ @@ -942,7 +971,6 @@ prometheus-client = ">=0.7" name = "django-redis" version = "5.3.0" description = "Full featured redis cache backend for Django." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -961,7 +989,6 @@ hiredis = ["redis[hiredis] (>=3,!=4.0.0,!=4.0.1)"] name = "django-tables2" version = "2.6.0" description = "Table/data-grid framework for Django" -category = "main" optional = false python-versions = "*" files = [ @@ -979,7 +1006,6 @@ tablib = ["tablib"] name = "django-taggit" version = "4.0.0" description = "django-taggit is a reusable Django application for simple tagging." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -994,7 +1020,6 @@ Django = ">=3.2" name = "django-timezone-field" version = "5.1" description = "A Django app providing DB, form, and REST framework fields for zoneinfo and pytz timezone objects." 
-category = "main" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -1004,14 +1029,13 @@ files = [ [package.dependencies] "backports.zoneinfo" = {version = ">=0.2.1,<0.3.0", markers = "python_version < \"3.9\""} -Django = ">=2.2,<3.0.0 || >=3.2.0,<5.0" +Django = ">=2.2,<3.0.dev0 || >=3.2.dev0,<5.0" pytz = "*" [[package]] name = "django-tree-queries" version = "0.15.0" description = "Tree queries with explicit opt-in, without configurability" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1026,7 +1050,6 @@ tests = ["coverage"] name = "django-webserver" version = "1.2.0" description = "Django management commands for production webservers" -category = "main" optional = false python-versions = "*" files = [ @@ -1048,7 +1071,6 @@ waitress = ["waitress"] name = "djangorestframework" version = "3.14.0" description = "Web APIs for Django, made easy." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1064,7 +1086,6 @@ pytz = "*" name = "drf-react-template-framework" version = "0.0.17" description = "Django REST Framework plugin that creates form schemas for react-jsonschema-form" -category = "main" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -1079,7 +1100,6 @@ djangorestframework = ">=3.12.0,<4.0.0" name = "drf-spectacular" version = "0.26.3" description = "Sane and flexible OpenAPI 3 schema generation for Django REST framework" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1104,7 +1124,6 @@ sidecar = ["drf-spectacular-sidecar"] name = "drf-spectacular-sidecar" version = "2023.10.1" description = "Serve self-contained distribution builds of Swagger UI and Redoc with Django" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1119,7 +1138,6 @@ Django = ">=2.2" name = "emoji" version = "2.8.0" description = "Emoji for Python" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ 
-1130,11 +1148,24 @@ files = [ [package.extras] dev = ["coverage", "coveralls", "pytest"] +[[package]] +name = "executing" +version = "2.0.1" +description = "Get the currently executing AST node of a frame, and other information" +optional = false +python-versions = ">=3.5" +files = [ + {file = "executing-2.0.1-py2.py3-none-any.whl", hash = "sha256:eac49ca94516ccc753f9fb5ce82603156e590b27525a8bc32cce8ae302eb61bc"}, + {file = "executing-2.0.1.tar.gz", hash = "sha256:35afe2ce3affba8ee97f2d69927fa823b08b472b7b994e36a52a964b93d16147"}, +] + +[package.extras] +tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipython", "littleutils", "pytest", "rich"] + [[package]] name = "flake8" version = "5.0.4" description = "the modular source code checker: pep8 pyflakes and co" -category = "dev" optional = false python-versions = ">=3.6.1" files = [ @@ -1151,7 +1182,6 @@ pyflakes = ">=2.5.0,<2.6.0" name = "future" version = "0.18.3" description = "Clean single-source support for Python 3 and 2" -category = "main" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -1162,7 +1192,6 @@ files = [ name = "ghp-import" version = "2.1.0" description = "Copy your docs directly to the gh-pages branch." 
-category = "dev" optional = false python-versions = "*" files = [ @@ -1180,7 +1209,6 @@ dev = ["flake8", "markdown", "twine", "wheel"] name = "gitdb" version = "4.0.11" description = "Git Object Database" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1195,7 +1223,6 @@ smmap = ">=3.0.1,<6" name = "gitpython" version = "3.1.40" description = "GitPython is a Python library used to interact with Git repositories" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1213,7 +1240,6 @@ test = ["black", "coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mock", "mypy", "pre name = "graphene" version = "2.1.9" description = "GraphQL Framework for Python" -category = "main" optional = false python-versions = "*" files = [ @@ -1236,7 +1262,6 @@ test = ["coveralls", "fastdiff (==0.2.0)", "iso8601", "mock", "promise", "pytest name = "graphene-django" version = "2.16.0" description = "Graphene Django integration" -category = "main" optional = false python-versions = "*" files = [ @@ -1261,7 +1286,6 @@ test = ["coveralls", "django-filter (>=2)", "djangorestframework (>=3.6.3)", "mo name = "graphene-django-optimizer" version = "0.8.0" description = "Optimize database access inside graphene queries." -category = "main" optional = false python-versions = "*" files = [ @@ -1272,7 +1296,6 @@ files = [ name = "graphql-core" version = "2.3.2" description = "GraphQL implementation for Python" -category = "main" optional = false python-versions = "*" files = [ @@ -1293,7 +1316,6 @@ test = ["coveralls (==1.11.1)", "cython (==0.29.17)", "gevent (==1.5.0)", "pyann name = "graphql-relay" version = "2.0.1" description = "Relay implementation for Python" -category = "main" optional = false python-versions = "*" files = [ @@ -1310,7 +1332,6 @@ six = ">=1.12" name = "griffe" version = "0.38.0" description = "Signatures for entire Python programs. 
Extract the structure, the frame, the skeleton of your project, to generate API documentation or find breaking changes in your API." -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1325,7 +1346,6 @@ colorama = ">=0.4" name = "idna" version = "3.4" description = "Internationalized Domain Names in Applications (IDNA)" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -1337,7 +1357,6 @@ files = [ name = "importlib-metadata" version = "6.8.0" description = "Read metadata from Python packages" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1357,7 +1376,6 @@ testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs name = "importlib-resources" version = "5.13.0" description = "Read resources from Python packages" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1372,11 +1390,25 @@ zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-ruff"] +[[package]] +name = "incremental" +version = "22.10.0" +description = "\"A small library that versions your Python projects.\"" +optional = false +python-versions = "*" +files = [ + {file = "incremental-22.10.0-py2.py3-none-any.whl", hash = "sha256:b864a1f30885ee72c5ac2835a761b8fe8aa9c28b9395cacf27286602688d3e51"}, + {file = "incremental-22.10.0.tar.gz", hash = "sha256:912feeb5e0f7e0188e6f42241d2f450002e11bbc0937c65865045854c24c0bd0"}, +] + +[package.extras] +mypy = ["click (>=6.0)", "mypy (==0.812)", "twisted (>=16.4.0)"] +scripts = ["click (>=6.0)", "twisted (>=16.4.0)"] + [[package]] name = "inflection" version = "0.5.1" description = "A port of Ruby on Rails inflector to Python" -category = "main" optional = 
false python-versions = ">=3.5" files = [ @@ -1388,7 +1420,6 @@ files = [ name = "invoke" version = "2.2.0" description = "Pythonic task execution" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1396,11 +1427,49 @@ files = [ {file = "invoke-2.2.0.tar.gz", hash = "sha256:ee6cbb101af1a859c7fe84f2a264c059020b0cb7fe3535f9424300ab568f6bd5"}, ] +[[package]] +name = "ipython" +version = "8.12.3" +description = "IPython: Productive Interactive Computing" +optional = false +python-versions = ">=3.8" +files = [ + {file = "ipython-8.12.3-py3-none-any.whl", hash = "sha256:b0340d46a933d27c657b211a329d0be23793c36595acf9e6ef4164bc01a1804c"}, + {file = "ipython-8.12.3.tar.gz", hash = "sha256:3910c4b54543c2ad73d06579aa771041b7d5707b033bd488669b4cf544e3b363"}, +] + +[package.dependencies] +appnope = {version = "*", markers = "sys_platform == \"darwin\""} +backcall = "*" +colorama = {version = "*", markers = "sys_platform == \"win32\""} +decorator = "*" +jedi = ">=0.16" +matplotlib-inline = "*" +pexpect = {version = ">4.3", markers = "sys_platform != \"win32\""} +pickleshare = "*" +prompt-toolkit = ">=3.0.30,<3.0.37 || >3.0.37,<3.1.0" +pygments = ">=2.4.0" +stack-data = "*" +traitlets = ">=5" +typing-extensions = {version = "*", markers = "python_version < \"3.10\""} + +[package.extras] +all = ["black", "curio", "docrepr", "ipykernel", "ipyparallel", "ipywidgets", "matplotlib", "matplotlib (!=3.2.0)", "nbconvert", "nbformat", "notebook", "numpy (>=1.21)", "pandas", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio", "qtconsole", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "trio", "typing-extensions"] +black = ["black"] +doc = ["docrepr", "ipykernel", "matplotlib", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "typing-extensions"] +kernel = ["ipykernel"] +nbconvert = ["nbconvert"] +nbformat = ["nbformat"] +notebook = 
["ipywidgets", "notebook"] +parallel = ["ipyparallel"] +qtconsole = ["qtconsole"] +test = ["pytest (<7.1)", "pytest-asyncio", "testpath"] +test-extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.21)", "pandas", "pytest (<7.1)", "pytest-asyncio", "testpath", "trio"] + [[package]] name = "isort" version = "5.12.0" description = "A Python utility / library to sort Python imports." -category = "dev" optional = false python-versions = ">=3.8.0" files = [ @@ -1414,11 +1483,29 @@ pipfile-deprecated-finder = ["pip-shims (>=0.5.2)", "pipreqs", "requirementslib" plugins = ["setuptools"] requirements-deprecated-finder = ["pip-api", "pipreqs"] +[[package]] +name = "jedi" +version = "0.19.1" +description = "An autocompletion tool for Python that can be used for text editors." +optional = false +python-versions = ">=3.6" +files = [ + {file = "jedi-0.19.1-py2.py3-none-any.whl", hash = "sha256:e983c654fe5c02867aef4cdfce5a2fbb4a50adc0af145f70504238f18ef5e7e0"}, + {file = "jedi-0.19.1.tar.gz", hash = "sha256:cf0496f3651bc65d7174ac1b7d043eff454892c708a87d1b683e57b569927ffd"}, +] + +[package.dependencies] +parso = ">=0.8.3,<0.9.0" + +[package.extras] +docs = ["Jinja2 (==2.11.3)", "MarkupSafe (==1.1.1)", "Pygments (==2.8.1)", "alabaster (==0.7.12)", "babel (==2.9.1)", "chardet (==4.0.0)", "commonmark (==0.8.1)", "docutils (==0.17.1)", "future (==0.18.2)", "idna (==2.10)", "imagesize (==1.2.0)", "mock (==1.0.1)", "packaging (==20.9)", "pyparsing (==2.4.7)", "pytz (==2021.1)", "readthedocs-sphinx-ext (==2.1.4)", "recommonmark (==0.5.0)", "requests (==2.25.1)", "six (==1.15.0)", "snowballstemmer (==2.1.0)", "sphinx (==1.8.5)", "sphinx-rtd-theme (==0.4.3)", "sphinxcontrib-serializinghtml (==1.1.4)", "sphinxcontrib-websupport (==1.2.4)", "urllib3 (==1.26.4)"] +qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"] +testing = ["Django", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] + [[package]] name = "jinja2" version = "3.1.2" description = "A very 
fast and expressive template engine." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1436,7 +1523,6 @@ i18n = ["Babel (>=2.7)"] name = "jsonschema" version = "4.18.6" description = "An implementation of JSON Schema validation for Python" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1460,7 +1546,6 @@ format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339- name = "jsonschema-specifications" version = "2023.11.1" description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1476,7 +1561,6 @@ referencing = ">=0.31.0" name = "junos-eznc" version = "2.6.8" description = "Junos 'EZ' automation for non-programmers" -category = "main" optional = false python-versions = ">=3.5, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -1501,7 +1585,6 @@ yamlordereddictloader = "*" name = "kombu" version = "5.3.4" description = "Messaging library for Python." -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1536,7 +1619,6 @@ zookeeper = ["kazoo (>=2.8.0)"] name = "lazy-object-proxy" version = "1.9.0" description = "A fast and thorough lazy object proxy." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1582,7 +1664,6 @@ files = [ name = "lxml" version = "4.9.3" description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, != 3.4.*" files = [ @@ -1690,7 +1771,6 @@ source = ["Cython (>=0.29.35)"] name = "markdown" version = "3.3.7" description = "Python implementation of Markdown." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1708,7 +1788,6 @@ testing = ["coverage", "pyyaml"] name = "markdown-it-py" version = "3.0.0" description = "Python port of markdown-it. 
Markdown parsing, done right!" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1733,7 +1812,6 @@ testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] name = "markupsafe" version = "2.1.3" description = "Safely add untrusted strings to HTML/XML markup." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1789,11 +1867,24 @@ files = [ {file = "MarkupSafe-2.1.3.tar.gz", hash = "sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad"}, ] +[[package]] +name = "matplotlib-inline" +version = "0.1.6" +description = "Inline Matplotlib backend for Jupyter" +optional = false +python-versions = ">=3.5" +files = [ + {file = "matplotlib-inline-0.1.6.tar.gz", hash = "sha256:f887e5f10ba98e8d2b150ddcf4702c1e5f8b3a20005eb0f74bfdbd360ee6f304"}, + {file = "matplotlib_inline-0.1.6-py3-none-any.whl", hash = "sha256:f1f41aab5328aa5aaea9b16d083b128102f8712542f819fe7e6a420ff581b311"}, +] + +[package.dependencies] +traitlets = "*" + [[package]] name = "mccabe" version = "0.7.0" description = "McCabe checker, plugin for flake8" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1805,7 +1896,6 @@ files = [ name = "mdurl" version = "0.1.2" description = "Markdown URL utilities" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1817,7 +1907,6 @@ files = [ name = "mergedeep" version = "1.3.4" description = "A deep merge function for 🐍." -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1829,7 +1918,6 @@ files = [ name = "mkdocs" version = "1.5.2" description = "Project documentation with Markdown." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1861,7 +1949,6 @@ min-versions = ["babel (==2.9.0)", "click (==7.0)", "colorama (==0.4)", "ghp-imp name = "mkdocs-autorefs" version = "0.5.0" description = "Automatically link across pages in MkDocs." 
-category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1877,7 +1964,6 @@ mkdocs = ">=1.1" name = "mkdocs-material" version = "9.1.15" description = "Documentation that simply works" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1900,7 +1986,6 @@ requests = ">=2.26" name = "mkdocs-material-extensions" version = "1.3" description = "Extension pack for Python Markdown and MkDocs Material." -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1912,7 +1997,6 @@ files = [ name = "mkdocs-version-annotations" version = "1.0.0" description = "MkDocs plugin to add custom admonitions for documenting version differences" -category = "dev" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -1924,7 +2008,6 @@ files = [ name = "mkdocstrings" version = "0.22.0" description = "Automatic documentation from sources, for MkDocs." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1951,7 +2034,6 @@ python-legacy = ["mkdocstrings-python-legacy (>=0.2.1)"] name = "mkdocstrings-python" version = "1.5.2" description = "A Python handler for mkdocstrings." -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1967,7 +2049,6 @@ mkdocstrings = ">=0.20" name = "mypy-extensions" version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." -category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -1979,7 +2060,6 @@ files = [ name = "napalm" version = "4.1.0" description = "Network Automation and Programmability Abstraction Layer with Multivendor support" -category = "main" optional = false python-versions = "*" files = [ @@ -2012,7 +2092,6 @@ typing-extensions = ">=4.3.0" name = "nautobot" version = "2.0.4" description = "Source of truth and network automation platform." 
-category = "main" optional = false python-versions = ">=3.8,<3.12" files = [ @@ -2075,7 +2154,6 @@ sso = ["social-auth-core[openidconnect,saml] (>=4.4.2,<4.5.0)"] name = "ncclient" version = "0.6.13" description = "Python library for NETCONF clients" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -2092,7 +2170,6 @@ six = "*" name = "netaddr" version = "0.8.0" description = "A network address manipulation library for Python" -category = "main" optional = false python-versions = "*" files = [ @@ -2104,7 +2181,6 @@ files = [ name = "netmiko" version = "4.3.0" description = "Multi-vendor library to simplify legacy CLI connections to network devices" -category = "main" optional = false python-versions = ">=3.8,<4.0" files = [ @@ -2124,7 +2200,6 @@ textfsm = ">=1.1.3" name = "netutils" version = "1.6.0" description = "Common helper functions useful in network automation." -category = "main" optional = false python-versions = ">=3.8,<4.0" files = [ @@ -2139,7 +2214,6 @@ optionals = ["jsonschema (>=4.17.3,<5.0.0)", "napalm (>=4.0.0,<5.0.0)"] name = "ntc-templates" version = "4.0.1" description = "TextFSM Templates for Network Devices, and Python wrapper for TextFSM's CliTable." 
-category = "main" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -2154,7 +2228,6 @@ textfsm = ">=1.1.0,<2.0.0" name = "oauthlib" version = "3.2.2" description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -2171,7 +2244,6 @@ signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] name = "packaging" version = "23.1" description = "Core utilities for Python packages" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2183,7 +2255,6 @@ files = [ name = "paramiko" version = "3.3.1" description = "SSH2 protocol library" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -2201,11 +2272,25 @@ all = ["gssapi (>=1.4.1)", "invoke (>=2.0)", "pyasn1 (>=0.1.7)", "pywin32 (>=2.1 gssapi = ["gssapi (>=1.4.1)", "pyasn1 (>=0.1.7)", "pywin32 (>=2.1.8)"] invoke = ["invoke (>=2.0)"] +[[package]] +name = "parso" +version = "0.8.3" +description = "A Python Parser" +optional = false +python-versions = ">=3.6" +files = [ + {file = "parso-0.8.3-py2.py3-none-any.whl", hash = "sha256:c001d4636cd3aecdaf33cbb40aebb59b094be2a74c556778ef5576c175e19e75"}, + {file = "parso-0.8.3.tar.gz", hash = "sha256:8c07be290bb59f03588915921e29e8a50002acaf2cdc5fa0e0114f91709fafa0"}, +] + +[package.extras] +qa = ["flake8 (==3.8.3)", "mypy (==0.782)"] +testing = ["docopt", "pytest (<6.0.0)"] + [[package]] name = "pathspec" version = "0.11.2" description = "Utility library for gitignore style pattern matching of file paths." 
-category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2217,7 +2302,6 @@ files = [ name = "pbr" version = "6.0.0" description = "Python Build Reasonableness" -category = "dev" optional = false python-versions = ">=2.6" files = [ @@ -2225,11 +2309,35 @@ files = [ {file = "pbr-6.0.0.tar.gz", hash = "sha256:d1377122a5a00e2f940ee482999518efe16d745d423a670c27773dfbc3c9a7d9"}, ] +[[package]] +name = "pexpect" +version = "4.9.0" +description = "Pexpect allows easy control of interactive console applications." +optional = false +python-versions = "*" +files = [ + {file = "pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523"}, + {file = "pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f"}, +] + +[package.dependencies] +ptyprocess = ">=0.5" + +[[package]] +name = "pickleshare" +version = "0.7.5" +description = "Tiny 'shelve'-like database with concurrency support" +optional = false +python-versions = "*" +files = [ + {file = "pickleshare-0.7.5-py2.py3-none-any.whl", hash = "sha256:9649af414d74d4df115d5d718f82acb59c9d418196b7b4290ed47a12ce62df56"}, + {file = "pickleshare-0.7.5.tar.gz", hash = "sha256:87683d47965c1da65cdacaf31c8441d12b8044cdec9aca500cd78fc2c683afca"}, +] + [[package]] name = "pillow" version = "10.0.1" description = "Python Imaging Library (Fork)" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2297,7 +2405,6 @@ tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "pa name = "pkgutil-resolve-name" version = "1.3.10" description = "Resolve a name to an object." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -2309,7 +2416,6 @@ files = [ name = "platformdirs" version = "4.0.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
-category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2325,7 +2431,6 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-co name = "prometheus-client" version = "0.17.1" description = "Python client for the Prometheus monitoring system." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -2340,7 +2445,6 @@ twisted = ["twisted"] name = "promise" version = "2.3" description = "Promises/A+ implementation for Python" -category = "main" optional = false python-versions = "*" files = [ @@ -2357,7 +2461,6 @@ test = ["coveralls", "futures", "mock", "pytest (>=2.7.3)", "pytest-benchmark", name = "prompt-toolkit" version = "3.0.41" description = "Library for building powerful interactive command lines in Python" -category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -2372,7 +2475,6 @@ wcwidth = "*" name = "psycopg2-binary" version = "2.9.9" description = "psycopg2 - Python-PostgreSQL Database Adapter" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2447,11 +2549,35 @@ files = [ {file = "psycopg2_binary-2.9.9-cp39-cp39-win_amd64.whl", hash = "sha256:f7ae5d65ccfbebdfa761585228eb4d0df3a8b15cfb53bd953e713e09fbb12957"}, ] +[[package]] +name = "ptyprocess" +version = "0.7.0" +description = "Run a subprocess in a pseudo terminal" +optional = false +python-versions = "*" +files = [ + {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, + {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, +] + +[[package]] +name = "pure-eval" +version = "0.2.2" +description = "Safely evaluate AST nodes without side effects" +optional = false +python-versions = "*" +files = [ + {file = "pure_eval-0.2.2-py3-none-any.whl", hash = "sha256:01eaab343580944bc56080ebe0a674b39ec44a945e6d09ba7db3cb8cec289350"}, + {file = 
"pure_eval-0.2.2.tar.gz", hash = "sha256:2b45320af6dfaa1750f543d714b6d1c520a1688dec6fd24d339063ce0aaa9ac3"}, +] + +[package.extras] +tests = ["pytest"] + [[package]] name = "pycodestyle" version = "2.9.1" description = "Python style guide checker" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2463,7 +2589,6 @@ files = [ name = "pycparser" version = "2.21" description = "C parser in Python" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -2471,29 +2596,10 @@ files = [ {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, ] -[[package]] -name = "pydocstyle" -version = "6.3.0" -description = "Python docstring style checker" -category = "dev" -optional = false -python-versions = ">=3.6" -files = [ - {file = "pydocstyle-6.3.0-py3-none-any.whl", hash = "sha256:118762d452a49d6b05e194ef344a55822987a462831ade91ec5c06fd2169d019"}, - {file = "pydocstyle-6.3.0.tar.gz", hash = "sha256:7ce43f0c0ac87b07494eb9c0b462c0b73e6ff276807f204d6b53edc72b7e44e1"}, -] - -[package.dependencies] -snowballstemmer = ">=2.2.0" - -[package.extras] -toml = ["tomli (>=1.2.3)"] - [[package]] name = "pyeapi" version = "1.0.2" description = "Python Client for eAPI" -category = "main" optional = false python-versions = "*" files = [ @@ -2511,7 +2617,6 @@ test = ["coverage", "mock"] name = "pyflakes" version = "2.5.0" description = "passive checker of Python programs" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2523,7 +2628,6 @@ files = [ name = "pygments" version = "2.17.1" description = "Pygments is a syntax highlighting package written in Python." 
-category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2539,7 +2643,6 @@ windows-terminal = ["colorama (>=0.4.6)"] name = "pyjwt" version = "2.8.0" description = "JSON Web Token implementation in Python" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2557,7 +2660,6 @@ tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] name = "pylint" version = "2.17.7" description = "python code static checker" -category = "dev" optional = false python-versions = ">=3.7.2" files = [ @@ -2587,7 +2689,6 @@ testutils = ["gitpython (>3)"] name = "pylint-django" version = "2.5.5" description = "A Pylint plugin to help Pylint understand the Django web framework" -category = "dev" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -2606,7 +2707,6 @@ with-django = ["Django (>=2.2)"] name = "pylint-nautobot" version = "0.2.1" description = "Custom Pylint Rules for Nautobot" -category = "dev" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -2624,7 +2724,6 @@ tomli = ">=2.0.1,<3.0.0" name = "pylint-plugin-utils" version = "0.8.2" description = "Utilities and helpers for writing Pylint plugins" -category = "dev" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -2639,7 +2738,6 @@ pylint = ">=1.7" name = "pymdown-extensions" version = "10.4" description = "Extension pack for Python Markdown." 
-category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2658,7 +2756,6 @@ extra = ["pygments (>=2.12)"] name = "pynacl" version = "1.5.0" description = "Python binding to the Networking and Cryptography (NaCl) library" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -2685,7 +2782,6 @@ tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"] name = "pyparsing" version = "3.1.1" description = "pyparsing module - Classes and methods to define and execute parsing grammars" -category = "main" optional = false python-versions = ">=3.6.8" files = [ @@ -2700,7 +2796,6 @@ diagrams = ["jinja2", "railroad-diagrams"] name = "pyserial" version = "3.5" description = "Python Serial Port Extension" -category = "main" optional = false python-versions = "*" files = [ @@ -2715,7 +2810,6 @@ cp2110 = ["hidapi"] name = "python-crontab" version = "3.0.0" description = "Python Crontab API" -category = "main" optional = false python-versions = "*" files = [ @@ -2734,7 +2828,6 @@ cron-schedule = ["croniter"] name = "python-dateutil" version = "2.8.2" description = "Extensions to the standard Python datetime module" -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ @@ -2749,7 +2842,6 @@ six = ">=1.5" name = "python-slugify" version = "8.0.1" description = "A Python slugify application that also handles Unicode" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2767,7 +2859,6 @@ unidecode = ["Unidecode (>=1.1.1)"] name = "python3-openid" version = "3.2.0" description = "OpenID support for modern servers and consumers." 
-category = "main" optional = false python-versions = "*" files = [ @@ -2786,7 +2877,6 @@ postgresql = ["psycopg2"] name = "pytz" version = "2023.3.post1" description = "World timezone definitions, modern and historical" -category = "main" optional = false python-versions = "*" files = [ @@ -2798,7 +2888,6 @@ files = [ name = "pyuwsgi" version = "2.0.23" description = "The uWSGI server" -category = "main" optional = false python-versions = "*" files = [ @@ -2851,7 +2940,6 @@ files = [ name = "pyyaml" version = "6.0.1" description = "YAML parser and emitter for Python" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -2901,7 +2989,6 @@ files = [ name = "pyyaml-env-tag" version = "0.1" description = "A custom YAML tag for referencing environment variables in YAML files. " -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2916,7 +3003,6 @@ pyyaml = "*" name = "redis" version = "5.0.1" description = "Python client for Redis database and key-value store" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2935,7 +3021,6 @@ ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==20.0.1)", "requests (>=2.26.0)" name = "referencing" version = "0.31.0" description = "JSON Referencing + Python" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2951,7 +3036,6 @@ rpds-py = ">=0.7.0" name = "regex" version = "2023.10.3" description = "Alternative regular expression module, to replace re." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3049,7 +3133,6 @@ files = [ name = "requests" version = "2.31.0" description = "Python HTTP for Humans." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3071,7 +3154,6 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] name = "requests-oauthlib" version = "1.3.1" description = "OAuthlib authentication support for Requests." 
-category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -3090,7 +3172,6 @@ rsa = ["oauthlib[signedtoken] (>=3.0.0)"] name = "rich" version = "13.7.0" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" -category = "dev" optional = false python-versions = ">=3.7.0" files = [ @@ -3110,7 +3191,6 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"] name = "rpds-py" version = "0.13.1" description = "Python bindings to Rust's persistent data structures (rpds)" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -3215,11 +3295,36 @@ files = [ {file = "rpds_py-0.13.1.tar.gz", hash = "sha256:264f3a5906c62b9df3a00ad35f6da1987d321a053895bd85f9d5c708de5c0fbf"}, ] +[[package]] +name = "ruff" +version = "0.1.15" +description = "An extremely fast Python linter and code formatter, written in Rust." +optional = false +python-versions = ">=3.7" +files = [ + {file = "ruff-0.1.15-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:5fe8d54df166ecc24106db7dd6a68d44852d14eb0729ea4672bb4d96c320b7df"}, + {file = "ruff-0.1.15-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:6f0bfbb53c4b4de117ac4d6ddfd33aa5fc31beeaa21d23c45c6dd249faf9126f"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e0d432aec35bfc0d800d4f70eba26e23a352386be3a6cf157083d18f6f5881c8"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9405fa9ac0e97f35aaddf185a1be194a589424b8713e3b97b762336ec79ff807"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c66ec24fe36841636e814b8f90f572a8c0cb0e54d8b5c2d0e300d28a0d7bffec"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:6f8ad828f01e8dd32cc58bc28375150171d198491fc901f6f98d2a39ba8e3ff5"}, + {file = 
"ruff-0.1.15-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86811954eec63e9ea162af0ffa9f8d09088bab51b7438e8b6488b9401863c25e"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fd4025ac5e87d9b80e1f300207eb2fd099ff8200fa2320d7dc066a3f4622dc6b"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b17b93c02cdb6aeb696effecea1095ac93f3884a49a554a9afa76bb125c114c1"}, + {file = "ruff-0.1.15-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:ddb87643be40f034e97e97f5bc2ef7ce39de20e34608f3f829db727a93fb82c5"}, + {file = "ruff-0.1.15-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:abf4822129ed3a5ce54383d5f0e964e7fef74a41e48eb1dfad404151efc130a2"}, + {file = "ruff-0.1.15-py3-none-musllinux_1_2_i686.whl", hash = "sha256:6c629cf64bacfd136c07c78ac10a54578ec9d1bd2a9d395efbee0935868bf852"}, + {file = "ruff-0.1.15-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:1bab866aafb53da39c2cadfb8e1c4550ac5340bb40300083eb8967ba25481447"}, + {file = "ruff-0.1.15-py3-none-win32.whl", hash = "sha256:2417e1cb6e2068389b07e6fa74c306b2810fe3ee3476d5b8a96616633f40d14f"}, + {file = "ruff-0.1.15-py3-none-win_amd64.whl", hash = "sha256:3837ac73d869efc4182d9036b1405ef4c73d9b1f88da2413875e34e0d6919587"}, + {file = "ruff-0.1.15-py3-none-win_arm64.whl", hash = "sha256:9a933dfb1c14ec7a33cceb1e49ec4a16b51ce3c20fd42663198746efc0427360"}, + {file = "ruff-0.1.15.tar.gz", hash = "sha256:f6dfa8c1b21c913c326919056c390966648b680966febcb796cc9d1aaab8564e"}, +] + [[package]] name = "rx" version = "1.6.3" description = "Reactive Extensions (Rx) for Python" -category = "main" optional = false python-versions = "*" files = [ @@ -3230,7 +3335,6 @@ files = [ name = "scp" version = "0.14.5" description = "scp module for paramiko" -category = "main" optional = false python-versions = "*" files = [ @@ -3245,7 +3349,6 @@ paramiko = "*" name = "setuptools" version = "69.0.0" description = "Easily download, 
build, install, upgrade, and uninstall Python packages" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -3262,7 +3365,6 @@ testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jar name = "singledispatch" version = "4.1.0" description = "Backport functools.singledispatch to older Pythons." -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -3278,7 +3380,6 @@ testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", name = "six" version = "1.16.0" description = "Python 2 and 3 compatibility utilities" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -3290,7 +3391,6 @@ files = [ name = "smmap" version = "5.0.1" description = "A pure Python implementation of a sliding window memory map manager" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3298,23 +3398,10 @@ files = [ {file = "smmap-5.0.1.tar.gz", hash = "sha256:dceeb6c0028fdb6734471eb07c0cd2aae706ccaecab45965ee83f11c8d3b1f62"}, ] -[[package]] -name = "snowballstemmer" -version = "2.2.0" -description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." -category = "dev" -optional = false -python-versions = "*" -files = [ - {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, - {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, -] - [[package]] name = "social-auth-app-django" version = "5.2.0" description = "Python Social Authentication, Django integration." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3330,7 +3417,6 @@ social-auth-core = ">=4.4.1" name = "social-auth-core" version = "4.5.0" description = "Python social authentication made simple." 
-category = "main" optional = false python-versions = ">=3.8" files = [ @@ -3357,7 +3443,6 @@ saml = ["python3-saml (>=1.5.0)"] name = "sqlparse" version = "0.4.4" description = "A non-validating SQL parser." -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -3370,11 +3455,29 @@ dev = ["build", "flake8"] doc = ["sphinx"] test = ["pytest", "pytest-cov"] +[[package]] +name = "stack-data" +version = "0.6.3" +description = "Extract data from python stack frames and tracebacks for informative displays" +optional = false +python-versions = "*" +files = [ + {file = "stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695"}, + {file = "stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9"}, +] + +[package.dependencies] +asttokens = ">=2.1.0" +executing = ">=1.2.0" +pure-eval = "*" + +[package.extras] +tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"] + [[package]] name = "stevedore" version = "5.1.0" description = "Manage dynamic plugins for Python applications" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -3389,7 +3492,6 @@ pbr = ">=2.0.0,<2.1.0 || >2.1.0" name = "svgwrite" version = "1.4.3" description = "A Python library to create SVG drawings." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -3401,7 +3503,6 @@ files = [ name = "text-unidecode" version = "1.3" description = "The most basic Text::Unidecode port" -category = "main" optional = false python-versions = "*" files = [ @@ -3413,7 +3514,6 @@ files = [ name = "textfsm" version = "1.1.3" description = "Python module for parsing semi-structured text into python tables." 
-category = "main" optional = false python-versions = "*" files = [ @@ -3429,7 +3529,6 @@ six = "*" name = "toml" version = "0.10.2" description = "Python Library for Tom's Obvious, Minimal Language" -category = "dev" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -3441,7 +3540,6 @@ files = [ name = "tomli" version = "2.0.1" description = "A lil' TOML parser" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3453,7 +3551,6 @@ files = [ name = "tomlkit" version = "0.12.3" description = "Style preserving TOML library" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3461,11 +3558,47 @@ files = [ {file = "tomlkit-0.12.3.tar.gz", hash = "sha256:75baf5012d06501f07bee5bf8e801b9f343e7aac5a92581f20f80ce632e6b5a4"}, ] +[[package]] +name = "towncrier" +version = "23.6.0" +description = "Building newsfiles for your project." +optional = false +python-versions = ">=3.7" +files = [ + {file = "towncrier-23.6.0-py3-none-any.whl", hash = "sha256:da552f29192b3c2b04d630133f194c98e9f14f0558669d427708e203fea4d0a5"}, + {file = "towncrier-23.6.0.tar.gz", hash = "sha256:fc29bd5ab4727c8dacfbe636f7fb5dc53b99805b62da1c96b214836159ff70c1"}, +] + +[package.dependencies] +click = "*" +click-default-group = "*" +importlib-resources = {version = ">=5", markers = "python_version < \"3.10\""} +incremental = "*" +jinja2 = "*" +tomli = {version = "*", markers = "python_version < \"3.11\""} + +[package.extras] +dev = ["furo", "packaging", "sphinx (>=5)", "twisted"] + +[[package]] +name = "traitlets" +version = "5.14.1" +description = "Traitlets Python configuration system" +optional = false +python-versions = ">=3.8" +files = [ + {file = "traitlets-5.14.1-py3-none-any.whl", hash = "sha256:2e5a030e6eff91737c643231bfcf04a65b0132078dad75e4936700b213652e74"}, + {file = "traitlets-5.14.1.tar.gz", hash = "sha256:8585105b371a04b8316a43d5ce29c098575c2e477850b62b848b964f1444527e"}, +] + +[package.extras] +docs = 
["myst-parser", "pydata-sphinx-theme", "sphinx"] +test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0,<7.5)", "pytest-mock", "pytest-mypy-testing"] + [[package]] name = "transitions" version = "0.9.0" description = "A lightweight, object-oriented Python state machine implementation with many extensions." -category = "main" optional = false python-versions = "*" files = [ @@ -3484,7 +3617,6 @@ test = ["pytest"] name = "ttp" version = "0.9.5" description = "Template Text Parser" -category = "main" optional = false python-versions = ">=2.7,<4.0" files = [ @@ -3500,7 +3632,6 @@ full = ["cerberus (>=1.3.0,<1.4.0)", "deepdiff (>=5.8.0,<5.9.0)", "jinja2 (>=3.0 name = "ttp-templates" version = "0.3.5" description = "Template Text Parser Templates collections" -category = "main" optional = false python-versions = ">=3.6,<4.0" files = [ @@ -3518,7 +3649,6 @@ docs = ["mkdocs (==1.2.4)", "mkdocs-material (==7.2.2)", "mkdocs-material-extens name = "typing-extensions" version = "4.8.0" description = "Backported and Experimental Type Hints for Python 3.8+" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -3530,7 +3660,6 @@ files = [ name = "tzdata" version = "2023.3" description = "Provider of IANA time zone data" -category = "main" optional = false python-versions = ">=2" files = [ @@ -3542,7 +3671,6 @@ files = [ name = "uritemplate" version = "4.1.1" description = "Implementation of RFC 6570 URI Templates" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -3554,7 +3682,6 @@ files = [ name = "urllib3" version = "2.1.0" description = "HTTP library with thread-safe connection pooling, file post, and more." -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -3571,7 +3698,6 @@ zstd = ["zstandard (>=0.18.0)"] name = "vine" version = "5.1.0" description = "Python promises." 
-category = "main" optional = false python-versions = ">=3.6" files = [ @@ -3583,7 +3709,6 @@ files = [ name = "watchdog" version = "3.0.0" description = "Filesystem events monitoring" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3623,7 +3748,6 @@ watchmedo = ["PyYAML (>=3.10)"] name = "wcwidth" version = "0.2.10" description = "Measures the displayed width of unicode strings in a terminal" -category = "main" optional = false python-versions = "*" files = [ @@ -3635,7 +3759,6 @@ files = [ name = "wrapt" version = "1.16.0" description = "Module for decorators, wrappers and monkey patching." -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -3715,7 +3838,6 @@ files = [ name = "yamllint" version = "1.33.0" description = "A linter for YAML files." -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -3734,7 +3856,6 @@ dev = ["doc8", "flake8", "flake8-import-order", "rstcheck[sphinx]", "sphinx"] name = "yamlordereddictloader" version = "0.4.2" description = "YAML loader and dumper for PyYAML allowing to keep keys order." 
-category = "main" optional = false python-versions = "*" files = [ @@ -3749,7 +3870,6 @@ pyyaml = "*" name = "zipp" version = "3.17.0" description = "Backport of pathlib-compatible object wrapper for zip files" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -3761,7 +3881,10 @@ files = [ docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"] +[extras] +all = [] + [metadata] lock-version = "2.0" python-versions = ">=3.8,<3.12" -content-hash = "3eeabf46ff196a3c23659ada1fe5d50b0bd1a788f9c858f3a475d39b7e12bd80" +content-hash = "e39faf37d6de2013b7111b3b139aedaa630464694e0f13850acb3f6404aaa87a" From 0291743768ea89c80c261236d91f1f6024f5d4ea Mon Sep 17 00:00:00 2001 From: Jan Snasel Date: Wed, 31 Jan 2024 13:20:29 +0000 Subject: [PATCH 035/225] chore: Changelog fragment --- changes/151.changed | 1 + 1 file changed, 1 insertion(+) create mode 100644 changes/151.changed diff --git a/changes/151.changed b/changes/151.changed new file mode 100644 index 00000000..7a7cdde0 --- /dev/null +++ b/changes/151.changed @@ -0,0 +1 @@ +Replaced pydocstyle with ruff. 
From 811726afabeda57aa8af8ac7d323b42cef10179e Mon Sep 17 00:00:00 2001 From: Susan Hooks Date: Wed, 31 Jan 2024 18:32:38 +0000 Subject: [PATCH 036/225] added juniper support --- nautobot_device_onboarding/constants.py | 1 + nautobot_device_onboarding/jobs.py | 4 ++- .../nornir_plays/command_getter.py | 2 +- .../nornir_plays/processor.py | 5 +-- nautobot_device_onboarding/utils/formatter.py | 31 +++++++++++++++++++ 5 files changed, 39 insertions(+), 4 deletions(-) diff --git a/nautobot_device_onboarding/constants.py b/nautobot_device_onboarding/constants.py index a469e75e..f9c9f91f 100644 --- a/nautobot_device_onboarding/constants.py +++ b/nautobot_device_onboarding/constants.py @@ -13,4 +13,5 @@ "cisco_ios": ["show version", "show inventory", "show interfaces"], "cisco_nxos": ["show version", "show inventory", "show interface"], "cisco_xe": ["show version", "show inventory", "show interfaces"], + "juniper_junos": ["show version", "show interfaces", "show chassis hardware"], } diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py index 286e9fe3..539df5d8 100644 --- a/nautobot_device_onboarding/jobs.py +++ b/nautobot_device_onboarding/jobs.py @@ -472,7 +472,9 @@ def run(self, *args, **kwargs): ) as nornir_obj: nr_with_processors = nornir_obj.with_processors([ProcessorDO(logger, compiled_results)]) for entered_ip in self.ip_addresses: - single_host_inventory_constructed = _set_inventory(entered_ip, self.platform, self.port, self.secrets_group) + single_host_inventory_constructed = _set_inventory( + entered_ip, self.platform, self.port, self.secrets_group + ) nr_with_processors.inventory.hosts.update(single_host_inventory_constructed) nr_with_processors.run(task=netmiko_send_commands) final_result = self._process_result(compiled_results, self.ip_addresses) diff --git a/nautobot_device_onboarding/nornir_plays/command_getter.py b/nautobot_device_onboarding/nornir_plays/command_getter.py index 5fb42f12..6d8a547e 100755 --- 
a/nautobot_device_onboarding/nornir_plays/command_getter.py +++ b/nautobot_device_onboarding/nornir_plays/command_getter.py @@ -8,5 +8,5 @@ def netmiko_send_commands(task: Task): """Run commands specified in PLATFORM_COMMAND_MAP.""" - for command in PLATFORM_COMMAND_MAP.get(task.host.platform, "default"): + for command in PLATFORM_COMMAND_MAP.get(task.host.platform): task.run(task=netmiko_send_command, name=command, command_string=command, use_textfsm=True) diff --git a/nautobot_device_onboarding/nornir_plays/processor.py b/nautobot_device_onboarding/nornir_plays/processor.py index 428def3f..64b26c45 100755 --- a/nautobot_device_onboarding/nornir_plays/processor.py +++ b/nautobot_device_onboarding/nornir_plays/processor.py @@ -7,7 +7,7 @@ from nornir_nautobot.exceptions import NornirNautobotException from nornir_nautobot.plugins.processors import BaseLoggingProcessor -from nautobot_device_onboarding.utils.formatter import format_ob_data_ios, format_ob_data_nxos +from nautobot_device_onboarding.utils.formatter import format_ob_data_ios, format_ob_data_nxos, format_ob_data_junos class ProcessorDO(BaseLoggingProcessor): @@ -71,7 +71,6 @@ def subtask_instance_completed(self, task: Task, host: Host, result: MultiResult "failed": result.failed, "subtask_result": result.result, } - self.logger.info(f" self.data: {self.data}") if self.data[task.name][host.name].get("failed"): self.data[host.name] = { @@ -89,6 +88,8 @@ def subtask_instance_completed(self, task: Task, host: Host, result: MultiResult formatted_data = format_ob_data_ios(host, result) elif host.platform == "cisco_nxos": formatted_data = format_ob_data_nxos(host, result) + elif host.platform == "juniper_junos": + formatted_data = format_ob_data_junos(host, result) else: formatted_data = {} self.logger.info(f"No formatter for {host.platform}.", extra={"object": task.host}) diff --git a/nautobot_device_onboarding/utils/formatter.py b/nautobot_device_onboarding/utils/formatter.py index f16131fc..b1757ff2 100644 --- 
a/nautobot_device_onboarding/utils/formatter.py +++ b/nautobot_device_onboarding/utils/formatter.py @@ -57,4 +57,35 @@ def format_ob_data_nxos(host, result): formatted_data["mgmt_interface"] = interface_name break + +def format_ob_data_junos(host, result): + """Format the data for onboarding Juniper JUNOS devices.""" + primary_ip4 = host.name + formatted_data = {} + + for r in result: + if r.name == "show version": + device_type = r.result[0].get("model") + formatted_data["device_type"] = device_type + hostname = r.result[0].get("hostname") + serial = "USASR24490" + # serial = r.result[0].get("serial") + formatted_data["hostname"] = hostname + if serial: + formatted_data["serial"] = serial + else: + formatted_data["serial"] = "" + elif r.name == "show interfaces": + show_interfaces = r.result + print(f"show interfaces {show_interfaces}") + for interface in show_interfaces: + if interface.get("local") == primary_ip4: + print(interface.get("destination")) + mask_length = interface.get("destination").split("/")[1] + print(f"interface mask {mask_length}") + interface_name = interface.get("interface") + formatted_data["mask_length"] = mask_length + formatted_data["mgmt_interface"] = interface_name + break + return formatted_data From 7438e6460a44fbcf9a09987b5cc16b226ccdafde Mon Sep 17 00:00:00 2001 From: Susan Hooks Date: Wed, 31 Jan 2024 21:08:31 +0000 Subject: [PATCH 037/225] adding NI --- nautobot_device_onboarding/jobs.py | 81 ++++++++++++++++++- .../nornir_plays/processor.py | 2 +- 2 files changed, 81 insertions(+), 2 deletions(-) diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py index 539df5d8..483149c5 100644 --- a/nautobot_device_onboarding/jobs.py +++ b/nautobot_device_onboarding/jobs.py @@ -488,7 +488,86 @@ def run(self, *args, **kwargs): self.logger.info("Error: %s", err) return err return final_result + +class CommandGetterNetworkImporter(Job): + """Simple Job to Execute Show Command.""" + + debug = 
BooleanVar(description="Enable for more verbose logging.") + devices = MultiObjectVar( + model=Device, + required=False, + description="Device(s) to update.", + ) + location = ObjectVar( + model=Location, + query_params={"content_type": "dcim.device"}, + required=False, + description="Only update devices at a specific location.", + ) + device_role = ObjectVar( + model=Role, + query_params={"content_types": "dcim.device"}, + required=False, + description="Only update devices with the selected role.", + ) + tag = ObjectVar( + model=Tag, + query_params={"content_types": "dcim.device"}, + required=False, + description="Only update devices with the selected tag.", + ) + + class Meta: # pylint: disable=too-few-public-methods + """Meta object boilerplate for onboarding.""" + + name = "Command Getter for Network Importer" + description = "Login to a device(s) and run commands." + has_sensitive_variables = False + hidden = False + + def _process_result(self, command_result, ip_addresses): + """Process the data returned from devices.""" + processed_device_data = {} + for ip_address in ip_addresses: + processed_device_data[ip_address] = command_result[ip_address] + if self.debug: + self.logger.debug(f"Processed CommandGetterNetworkImporter return for {ip_address}: {command_result[ip_address]}") + return processed_device_data + + def run(self, *args, **kwargs): + """Process onboarding task from ssot-ni job.""" + self.ip_addresses = kwargs["ip_addresses"].replace(" ", "").split(",") + self.port = kwargs["port"] + self.timeout = kwargs["timeout"] + self.secrets_group = kwargs["secrets_group"] + self.platform = kwargs["platform"] + + # Initiate Nornir instance with empty inventory + try: + logger = NornirLogger(self.job_result, log_level=0) + compiled_results = {} + with InitNornir( + runner=NORNIR_SETTINGS.get("runner"), + logging={"enabled": False}, + inventory={ + "plugin": "nautobot-inventory", + }, + ) as nornir_obj: + nr_with_processors = 
nornir_obj.with_processors([ProcessorDO(logger, compiled_results)]) + nr_with_processors.run(task=netmiko_send_commands) + + final_result = self._process_result(compiled_results, self.ip_addresses) + + # Remove before final merge # + for host, data in nr_with_processors.inventory.hosts.items(): + self.logger.info("%s;\n%s", host, data.dict()) + # End # + + except Exception as err: # pylint: disable=broad-exception-caught + self.logger.info("Error: %s", err) + return err + return final_result -jobs = [OnboardingTask, SSOTDeviceOnboarding, SSOTNetworkImporter, CommandGetterDO] +jobs = [OnboardingTask, SSOTDeviceOnboarding, SSOTNetworkImporter, CommandGetterDO, CommandGetterNetworkImporter] register_jobs(*jobs) diff --git a/nautobot_device_onboarding/nornir_plays/processor.py b/nautobot_device_onboarding/nornir_plays/processor.py index 64b26c45..afd4f3b9 100755 --- a/nautobot_device_onboarding/nornir_plays/processor.py +++ b/nautobot_device_onboarding/nornir_plays/processor.py @@ -92,7 +92,7 @@ def subtask_instance_completed(self, task: Task, host: Host, result: MultiResult formatted_data = format_ob_data_junos(host, result) else: formatted_data = {} - self.logger.info(f"No formatter for {host.platform}.", extra={"object": task.host}) + self.logger.info(f"No formatter for platform: {host.platform}.", extra={"object": task.host}) self.data[host.name].update(formatted_data) From aac88a42e2a100923d18f9652a4fe2c9414e8694 Mon Sep 17 00:00:00 2001 From: David Cates Date: Tue, 6 Feb 2024 10:01:56 -0700 Subject: [PATCH 038/225] bump version --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 62e0bb83..1686367b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "nautobot-device-onboarding" -version = "3.0.1" +version = "3.0.2a0" description = "A app for Nautobot to easily onboard new devices." 
authors = ["Network to Code, LLC "] license = "Apache-2.0" From 41ec82d8ebbee17d346376ca05613ac38a353f78 Mon Sep 17 00:00:00 2001 From: David Cates Date: Tue, 6 Feb 2024 10:20:20 -0700 Subject: [PATCH 039/225] black, isort, pydocstyle --- .../diffsync/adapters/onboarding_adapters.py | 2 +- .../diffsync/models/onboarding_models.py | 3 +- nautobot_device_onboarding/jobs.py | 32 ++++++++++++------- .../nornir_plays/processor.py | 2 +- .../utils/inventory_creator.py | 3 +- 5 files changed, 25 insertions(+), 17 deletions(-) diff --git a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py index 081e80dd..7115663a 100644 --- a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py @@ -2,12 +2,12 @@ import time +import diffsync import netaddr from nautobot.apps.choices import JobResultStatusChoices from nautobot.dcim.models import Device, DeviceType, Manufacturer, Platform from nautobot.extras.models import Job, JobResult -import diffsync from nautobot_device_onboarding.diffsync.models import onboarding_models ####################################### diff --git a/nautobot_device_onboarding/diffsync/models/onboarding_models.py b/nautobot_device_onboarding/diffsync/models/onboarding_models.py index 879982e0..1f45ac83 100644 --- a/nautobot_device_onboarding/diffsync/models/onboarding_models.py +++ b/nautobot_device_onboarding/diffsync/models/onboarding_models.py @@ -3,6 +3,7 @@ import ipaddress from typing import Optional +from diffsync import DiffSyncModel from django.core.exceptions import MultipleObjectsReturned, ObjectDoesNotExist, ValidationError from nautobot.apps.choices import InterfaceTypeChoices, PrefixTypeChoices from nautobot.dcim.models import Device, DeviceType, Interface, Manufacturer, Platform @@ -10,8 +11,6 @@ from nautobot.ipam.models import IPAddress, Prefix from nautobot_ssot.contrib 
import NautobotModel -from diffsync import DiffSyncModel - class OnboardingDevice(DiffSyncModel): """Diffsync model for device data.""" diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py index 483149c5..b0259c6d 100644 --- a/nautobot_device_onboarding/jobs.py +++ b/nautobot_device_onboarding/jobs.py @@ -10,6 +10,10 @@ from nautobot.extras.choices import SecretsGroupAccessTypeChoices, SecretsGroupSecretTypeChoices from nautobot.extras.models import Role, SecretsGroup, SecretsGroupAssociation, Status, Tag from nautobot.ipam.models import Namespace +from nautobot_ssot.jobs.base import DataSource +from nornir import InitNornir +from nornir.core.plugins.inventory import InventoryPluginRegister + from nautobot_device_onboarding.diffsync.adapters.network_importer_adapters import ( NetworkImporterNautobotAdapter, NetworkImporterNetworkAdapter, @@ -26,9 +30,6 @@ from nautobot_device_onboarding.nornir_plays.logger import NornirLogger from nautobot_device_onboarding.nornir_plays.processor import ProcessorDO from nautobot_device_onboarding.utils.inventory_creator import _set_inventory -from nautobot_ssot.jobs.base import DataSource -from nornir import InitNornir -from nornir.core.plugins.inventory import InventoryPluginRegister InventoryPluginRegister.register("empty-inventory", EmptyInventory) @@ -352,10 +353,10 @@ def run( class SSOTNetworkImporter(DataSource): # pylint: disable=too-many-instance-attributes """Job syncing extended device attributes into Nautobot.""" - def __init__(self): - """Initialize SSOTDeviceOnboarding.""" - super().__init__() - self.diffsync_flags = DiffSyncFlags.SKIP_UNMATCHED_DST + # def __init__(self): + # """Initialize SSOTDeviceOnboarding.""" + # super().__init__() + # self.diffsync_flags = DiffSyncFlags.SKIP_UNMATCHED_DST class Meta: # pylint: disable=too-few-public-methods """Metadata about this Job.""" @@ -367,6 +368,9 @@ class Meta: # pylint: disable=too-few-public-methods ) debug = 
BooleanVar(description="Enable for more verbose logging.") + namespace = ObjectVar( + model=Namespace, required=True, description="The namespace for all IP addresses created or updated in the sync." + ) devices = MultiObjectVar( model=Device, required=False, @@ -402,12 +406,13 @@ def load_target_adapter(self): self.target_adapter.load() def run( - self, dryrun, memory_profiling, debug, location, devices, device_role, tag, *args, **kwargs + self, dryrun, memory_profiling, debug, namespace, location, devices, device_role, tag, *args, **kwargs ): # pylint:disable=arguments-differ, disable=too-many-arguments """Run sync.""" self.dryrun = dryrun self.memory_profiling = memory_profiling self.debug = debug + self.namespace = namespace self.location = location self.devices = devices self.device_role = device_role @@ -488,7 +493,8 @@ def run(self, *args, **kwargs): self.logger.info("Error: %s", err) return err return final_result - + + class CommandGetterNetworkImporter(Job): """Simple Job to Execute Show Command.""" @@ -531,7 +537,9 @@ def _process_result(self, command_result, ip_addresses): for ip_address in ip_addresses: processed_device_data[ip_address] = command_result[ip_address] if self.debug: - self.logger.debug(f"Processed CommandGetterNetworkImporter return for {ip_address}: {command_result[ip_address]}") + self.logger.debug( + f"Processed CommandGetterNetworkImporter return for {ip_address}: {command_result[ip_address]}" + ) return processed_device_data def run(self, *args, **kwargs): @@ -555,7 +563,7 @@ def run(self, *args, **kwargs): ) as nornir_obj: nr_with_processors = nornir_obj.with_processors([ProcessorDO(logger, compiled_results)]) nr_with_processors.run(task=netmiko_send_commands) - + final_result = self._process_result(compiled_results, self.ip_addresses) # Remove before final merge # @@ -569,5 +577,5 @@ def run(self, *args, **kwargs): return final_result -jobs = [OnboardingTask, SSOTDeviceOnboarding, SSOTNetworkImporter, CommandGetterDO, 
CommandGetterNetworkImporter] +jobs = [OnboardingTask, SSOTDeviceOnboarding, CommandGetterDO, CommandGetterNetworkImporter] register_jobs(*jobs) diff --git a/nautobot_device_onboarding/nornir_plays/processor.py b/nautobot_device_onboarding/nornir_plays/processor.py index afd4f3b9..5dc2d5df 100755 --- a/nautobot_device_onboarding/nornir_plays/processor.py +++ b/nautobot_device_onboarding/nornir_plays/processor.py @@ -7,7 +7,7 @@ from nornir_nautobot.exceptions import NornirNautobotException from nornir_nautobot.plugins.processors import BaseLoggingProcessor -from nautobot_device_onboarding.utils.formatter import format_ob_data_ios, format_ob_data_nxos, format_ob_data_junos +from nautobot_device_onboarding.utils.formatter import format_ob_data_ios, format_ob_data_junos, format_ob_data_nxos class ProcessorDO(BaseLoggingProcessor): diff --git a/nautobot_device_onboarding/utils/inventory_creator.py b/nautobot_device_onboarding/utils/inventory_creator.py index 73990995..a0d2a8c4 100755 --- a/nautobot_device_onboarding/utils/inventory_creator.py +++ b/nautobot_device_onboarding/utils/inventory_creator.py @@ -3,10 +3,11 @@ from django.conf import settings from nautobot.dcim.models import Platform from nautobot.extras.choices import SecretsGroupAccessTypeChoices, SecretsGroupSecretTypeChoices -from nautobot_device_onboarding.exceptions import OnboardException from netmiko import SSHDetect from nornir.core.inventory import ConnectionOptions, Host +from nautobot_device_onboarding.exceptions import OnboardException + def _parse_credentials(credentials): """Parse and return dictionary of credentials.""" From 7ae76cc67c880be42a8ea3499c508b9b63ab7d5e Mon Sep 17 00:00:00 2001 From: David Cates Date: Tue, 6 Feb 2024 11:19:43 -0700 Subject: [PATCH 040/225] black, flake8, pydocstyle --- nautobot_device_onboarding/constants.py | 1 + .../diffsync/models/onboarding_models.py | 1 + nautobot_device_onboarding/utils/formatter.py | 1 + nautobot_device_onboarding/utils/inventory_creator.py | 
3 ++- 4 files changed, 5 insertions(+), 1 deletion(-) diff --git a/nautobot_device_onboarding/constants.py b/nautobot_device_onboarding/constants.py index d2c34fdb..f9c9f91f 100644 --- a/nautobot_device_onboarding/constants.py +++ b/nautobot_device_onboarding/constants.py @@ -14,3 +14,4 @@ "cisco_nxos": ["show version", "show inventory", "show interface"], "cisco_xe": ["show version", "show inventory", "show interfaces"], "juniper_junos": ["show version", "show interfaces", "show chassis hardware"], +} diff --git a/nautobot_device_onboarding/diffsync/models/onboarding_models.py b/nautobot_device_onboarding/diffsync/models/onboarding_models.py index f25d3064..1f45ac83 100644 --- a/nautobot_device_onboarding/diffsync/models/onboarding_models.py +++ b/nautobot_device_onboarding/diffsync/models/onboarding_models.py @@ -2,6 +2,7 @@ import ipaddress from typing import Optional + from diffsync import DiffSyncModel from django.core.exceptions import MultipleObjectsReturned, ObjectDoesNotExist, ValidationError from nautobot.apps.choices import InterfaceTypeChoices, PrefixTypeChoices diff --git a/nautobot_device_onboarding/utils/formatter.py b/nautobot_device_onboarding/utils/formatter.py index 76b46fac..b1757ff2 100644 --- a/nautobot_device_onboarding/utils/formatter.py +++ b/nautobot_device_onboarding/utils/formatter.py @@ -57,6 +57,7 @@ def format_ob_data_nxos(host, result): formatted_data["mgmt_interface"] = interface_name break + def format_ob_data_junos(host, result): """Format the data for onboarding Juniper JUNOS devices.""" primary_ip4 = host.name diff --git a/nautobot_device_onboarding/utils/inventory_creator.py b/nautobot_device_onboarding/utils/inventory_creator.py index 8767c0fb..7f821c5f 100755 --- a/nautobot_device_onboarding/utils/inventory_creator.py +++ b/nautobot_device_onboarding/utils/inventory_creator.py @@ -1,7 +1,6 @@ """Inventory Creator and Helpers.""" from django.conf import settings -from nautobot.dcim.models import Platform from 
nautobot.extras.choices import SecretsGroupAccessTypeChoices, SecretsGroupSecretTypeChoices from netmiko import SSHDetect from nornir.core.inventory import ConnectionOptions, Host @@ -36,6 +35,7 @@ def _parse_credentials(credentials): secret = settings.NAPALM_ARGS.get("secret", None) return (username, password, secret) + def guess_netmiko_device_type(hostname, username, password, port): """Guess the device type of host, based on Netmiko.""" guessed_device_type = None @@ -59,6 +59,7 @@ def guess_netmiko_device_type(hostname, username, password, port): print(f"************************Guessed device type: {guessed_device_type}") return guessed_device_type + def _set_inventory(host_ip, platform, port, secrets_group): """Construct Nornir Inventory.""" inv = {} From f33b2f74046cedaab75373a792952f8acf1f991a Mon Sep 17 00:00:00 2001 From: Susan Hooks Date: Tue, 6 Feb 2024 18:55:50 +0000 Subject: [PATCH 041/225] pylint --- nautobot_device_onboarding/jobs.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py index b0259c6d..f64bc4c1 100644 --- a/nautobot_device_onboarding/jobs.py +++ b/nautobot_device_onboarding/jobs.py @@ -1,3 +1,4 @@ +# pylint: disable=attribute-defined-outside-init """Device Onboarding Jobs.""" import logging @@ -537,7 +538,7 @@ def _process_result(self, command_result, ip_addresses): for ip_address in ip_addresses: processed_device_data[ip_address] = command_result[ip_address] if self.debug: - self.logger.debug( + self.logger.debug( # pylint: disable=logging-fstring-interpolation f"Processed CommandGetterNetworkImporter return for {ip_address}: {command_result[ip_address]}" ) return processed_device_data From 5bf41775546bbcf9983a7953ef870d327b61b82a Mon Sep 17 00:00:00 2001 From: David Cates Date: Tue, 6 Feb 2024 23:00:32 -0700 Subject: [PATCH 042/225] update ssot network importer --- .../adapters/network_importer_adapters.py | 226 +++++++++++++++- 
.../models/network_importer_models.py | 242 ++++++++++++++++-- .../diffsync/models/onboarding_models.py | 3 +- nautobot_device_onboarding/jobs.py | 3 +- 4 files changed, 443 insertions(+), 31 deletions(-) diff --git a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py index 7fede25c..aca64fd2 100644 --- a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py @@ -1,9 +1,14 @@ """DiffSync adapters.""" -from nautobot_ssot.contrib import NautobotAdapter +import ipaddress import diffsync +from nautobot_ssot.contrib import NautobotAdapter +from netaddr import EUI, mac_unix_expanded + from nautobot_device_onboarding.diffsync.models import network_importer_models +from nautobot.ipam.models import IPAddressToInterface +from nautobot.dcim.models import Device ####################################### # FOR TESTING ONLY - TO BE REMOVED # @@ -14,19 +19,72 @@ "interfaces": { "GigabitEthernet1": { "mgmt_only": True, - "ip_addresses": ["10.1.1.8"], + "status": "Active", + "type": "100base-tx", + "ip_addresses": [ + {"host": "10.1.1.8", "mask_length": 32}, + ], + "mac_address": "d8b1.905c.5130", + "mtu": "1500", + "description": "", + "enabled": True, + "802.1Q_mode": "", + "lag": "", }, "GigabitEthernet2": { "mgmt_only": False, - "ip_addresses": ["10.1.1.9"], + "status": "Active", + "type": "100base-tx", + "ip_addresses": [ + {"host": "10.1.1.9", "mask_length": 32}, + ], + "mac_address": "d8b1.905c.6130", + "mtu": "1500", + "description": "uplink Po1", + "enabled": True, + "802.1Q_mode": "tagged-all", + "lag": "Po1", }, "GigabitEthernet3": { "mgmt_only": False, - "ip_addresses": ["10.1.1.10, 10.1.1.11"], + "status": "Active", + "type": "100base-tx", + "ip_addresses": [ + {"host": "10.1.1.10", "mask_length": 32}, + {"host": "10.1.1.11", "mask_length": 30}, + ], + "mac_address": 
"d8b1.905c.6130", + "mtu": "1500", + "description": "", + "enabled": True, + "802.1Q_mode": "", + "lag": "", }, "GigabitEthernet4": { "mgmt_only": False, + "status": "Active", + "type": "100base-tx", + "ip_addresses": [ + {"host": "10.1.1.12", "mask_length": 32}, + ], + "mac_address": "d8b1.905c.7130", + "mtu": "1500", + "description": "", + "enabled": True, + "802.1Q_mode": "", + "lag": "", + }, + "Po1": { + "mgmt_only": False, + "status": "Active", + "type": "lag", "ip_addresses": [], + "mac_address": "d8b1.905c.8130", + "mtu": "1500", + "description": "", + "enabled": True, + "802.1Q_mode": "", + "lag": "", }, }, }, @@ -37,7 +95,7 @@ class FilteredNautobotAdapter(NautobotAdapter): """ - Allow for filtering of data loaded from Nautobot into DiffSync models. + Allow Nautobot data to be filtered by the Job form inputs. Must be used with FilteredNautobotModel. """ @@ -54,14 +112,63 @@ def _load_objects(self, diffsync_model): # pylint: disable=protected-access self._load_single_object(database_object, diffsync_model, parameter_names) +# TODO: remove this if unused +class mac_unix_expanded_uppercase(mac_unix_expanded): + word_fmt = "%.2X" + + class NetworkImporterNautobotAdapter(FilteredNautobotAdapter): """Adapter for loading Nautobot data.""" device = network_importer_models.NetworkImporterDevice interface = network_importer_models.NetworkImporterInterface - ip_address = network_importer_models.NetworkImporterIPAddress + # ip_address = network_importer_models.NetworkImporterIPAddress + ipaddress_to_interface = network_importer_models.NetworkImporterIPAddressToInterface + prefix = network_importer_models.NetworkImporterPrefix + + top_level = ["prefix", "device", "ipaddress_to_interface"] + + def load_ip_address_to_interfaces(self): + """ + Load the IP address to interface model into the DiffSync store. - top_level = ["device"] + Only interfaces which belong to devices included in the sync should be considered. 
+ """ + filter = {} + if self.job.devices: + filter["id__in"] = [device.id for device in self.job.devices] + if self.job.location: + filter["location"] = self.job.location + if self.job.device_role: + filter["role"] = self.job.device_role + if self.job.tag: + filter["tags"] = self.job.tag + devices_in_sync = Device.objects.filter(**filter) + + for obj in IPAddressToInterface.objects.filter(interface__device__in=devices_in_sync): + network_ip_address_to_interface = self.ipaddress_to_interface( + diffsync=self, + interface__device__name=obj.interface.device.name, + interface__name=obj.interface.name, + ip_address__host=obj.ip_address.host, + ip_address__mask_length=obj.ip_address.mask_length, + ) + self.add(network_ip_address_to_interface) + + def load(self): + """Generic implementation of the load function.""" + if not hasattr(self, "top_level") or not self.top_level: + raise ValueError("'top_level' needs to be set on the class.") + + for model_name in self.top_level: + if model_name is "ipaddress_to_interface": + self.load_ip_address_to_interfaces() + else: + diffsync_model = self._get_diffsync_class(model_name) + + # This function directly mutates the diffsync store, i.e. it will create and load the objects + # for this specific model class as well as its children without returning anything. 
+ self._load_objects(diffsync_model) class NetworkImporterNetworkAdapter(diffsync.DiffSync): @@ -73,16 +180,117 @@ def __init__(self, *args, job, sync=None, **kwargs): self.job = job self.sync = sync + device_data = mock_data + device = network_importer_models.NetworkImporterDevice interface = network_importer_models.NetworkImporterInterface - ip_address = network_importer_models.NetworkImporterIPAddress + # ip_address = network_importer_models.NetworkImporterIPAddress + ipaddress_to_interface = network_importer_models.NetworkImporterIPAddressToInterface + prefix = network_importer_models.NetworkImporterPrefix + + top_level = ["prefix", "device", "ipaddress_to_interface"] - top_level = ["device"] device_data = mock_data def load_devices(self): """Load device data from network devices.""" + for hostname, device_data in self.device_data.items(): + network_device = self.device(diffsync=self, name=hostname, serial=device_data["serial"]) + self.add(network_device) + if self.job.debug: + self.job.logger.debug(f"Device {network_device} loaded.") + for interface_name, interface_data in device_data["interfaces"].items(): + network_interface = self.load_interface(hostname, interface_name, interface_data) + network_device.add_child(network_interface) + if self.job.debug: + self.job.logger.debug(f"Interface {network_interface} loaded.") + + def load_interface(self, hostname, interface_name, interface_data): + """Load data for a single interface into the DiffSync store.""" + network_interface = self.interface( + diffsync=self, + name=interface_name, + device__name=hostname, + status__name=interface_data["status"], + type=interface_data["type"], + # mac_address=interface_data["mac_address"], + # mac_address=EUI(interface_data["mac_address"], version=48, dialect=mac_unix_expanded_uppercase), + mtu=interface_data["mtu"], + description=interface_data["description"], + enabled=interface_data["enabled"], + mode=interface_data["802.1Q_mode"], + mgmt_only=interface_data["mgmt_only"], + 
lag=interface_data["lag"], + ) + self.add(network_interface) + return network_interface + + def _determine_network(self, ip_address, mask_length): + ip_interface = ipaddress.ip_interface(f"{ip_address}/{mask_length}") + return str(ip_interface.network).split("/")[0] + + def load_prefixes(self): + """Load IP addresses used by interfaces into the DiffSync store.""" + for hostname, device_data in self.device_data.items(): + for interface_name, interface_data in device_data["interfaces"].items(): + for ip_address in interface_data["ip_addresses"]: + network_prefix = self.prefix( + diffsync=self, + namespace__name=self.job.namespace.name, + network=self._determine_network(ip_address=ip_address["host"], mask_length=24), + # TODO: prefix length is hard coded here, can it be determined from the device? + prefix_length=24, + status__name="Active", + ) + try: + self.add(network_prefix) + if self.job.debug: + self.job.logger.debug(f"{network_prefix} loaded.") + except diffsync.exceptions.ObjectAlreadyExists: + pass + + def load_ip_addresses(self): + """Load IP addresses used by interfaces into the DiffSync store.""" + for hostname, device_data in self.device_data.items(): + for interface_name, interface_data in device_data["interfaces"].items(): + for ip_address in interface_data["ip_addresses"]: + network_ip_address = self.ip_address( + diffsync=self, + host=ip_address["host"], + mask_length=ip_address["mask_length"], + type="host", + ip_version=4, + status__name="Active", + ) + try: + self.add(network_ip_address) + if self.job.debug: + self.job.logger.debug(f"{network_ip_address} loaded.") + except diffsync.exceptions.ObjectAlreadyExists: + self.job.warning( + f"{network_ip_address} is already loaded to the " + "DiffSync store. This is a duplicate IP Address." 
+ ) + + def load_ip_address_to_interfaces(self): + """Load the IP address to interface model into the DiffSync store.""" + for hostname, device_data in self.device_data.items(): + for interface_name, interface_data in device_data["interfaces"].items(): + for ip_address in interface_data["ip_addresses"]: + network_ip_address_to_interface = self.ipaddress_to_interface( + diffsync=self, + interface__device__name=hostname, + interface__name=interface_name, + ip_address__host=ip_address["host"], + ip_address__mask_length=ip_address["mask_length"], + ) + self.add(network_ip_address_to_interface) + if self.job.debug: + self.job.logger.debug(f"{network_ip_address_to_interface} loaded.") def load(self): """Load network data.""" + self.load_prefixes() + # self.load_ip_addresses() self.load_devices() + self.load_ip_address_to_interfaces() diff --git a/nautobot_device_onboarding/diffsync/models/network_importer_models.py b/nautobot_device_onboarding/diffsync/models/network_importer_models.py index 0b66d33f..749f8cbd 100644 --- a/nautobot_device_onboarding/diffsync/models/network_importer_models.py +++ b/nautobot_device_onboarding/diffsync/models/network_importer_models.py @@ -1,17 +1,22 @@ """Diffsync models.""" -from typing import List +from dataclasses import dataclass +from typing import List, Optional +from diffsync import DiffSync, DiffSyncModel +from django.core.exceptions import ObjectDoesNotExist, ValidationError from nautobot.dcim.models import Device, Interface -from nautobot.ipam.models import IPAddress +from nautobot.ipam.models import IPAddress, IPAddressToInterface, Prefix from nautobot_ssot.contrib import NautobotModel - -from diffsync import DiffSync +from nautobot.extras.models import Status +from nautobot.apps.choices import PrefixTypeChoices +from netaddr import EUI +import ipaddress class FilteredNautobotModel(NautobotModel): """ - Allow for filtering of data loaded from Nautobot into DiffSync models. 
+ Allow Nautobot data to be filtered by the Job form inputs. Must be used with FilteredNautobotAdapter. """ @@ -35,19 +40,17 @@ def get_queryset(cls, diffsync: "DiffSync"): class NetworkImporterDevice(FilteredNautobotModel): - """Diffsync model for device data.""" + """Shared data model representing a Device.""" _modelname = "device" _model = Device _identifiers = ( - "location__name", "name", "serial", ) _children = {"interface": "interfaces"} name: str - location__name: str serial: str interfaces: List["NetworkImporterInterface"] = [] @@ -73,9 +76,28 @@ def _get_queryset(cls, diffsync: "DiffSync"): diffsync.job.logger.error("No device filter options were provided, no devices will be synced.") return cls._model.objects.none() + @classmethod + def create(cls, diffsync, ids, attrs): + """ + Do not create new devices. + + Devices need to exist in Nautobot prior to syncing data. + """ + diffsync.job.logger.error( + f"{ids} is not included in the devices selected for syncing. " + "This device either does not exist in Nautobot or was not " + "included based on filter criteria provided on the job form." 
+ ) + return None + + def delete(self): + """Delete the ORM object corresponding to this diffsync object.""" + self.job.logger.error(f"{self} will not be deleted.") + return super().delete() + class NetworkImporterInterface(FilteredNautobotModel): - """Diffsync model for interface data.""" + """Shared data model representing an Interface.""" _modelname = "interface" _model = Interface @@ -83,22 +105,202 @@ class NetworkImporterInterface(FilteredNautobotModel): "device__name", "name", ) - _children = {"ip_address": "ip_addresses"} + _attributes = ( + "status__name", + "type", + # "mac_address", + "mtu", + # "parent_interface__name", + # "lag__name", + "mode", + "mgmt_only", + # tagged vlan, + # untagged vlans, + ) + + # _children = {"ip_address": "ip_addresses"} + device__name: str name: str - ip_addresses: List["NetworkImporterIPAddress"] = [] + status__name: Optional[str] + type: Optional[str] + mac_address: Optional[EUI] + mtu: Optional[str] + parent_interface__name: Optional[str] + # lag__name: Optional[str] + mode: Optional[str] + mgmt_only: Optional[bool] + # ip_addresses: List["NetworkImporterIPAddress"] = [] -class NetworkImporterIPAddress(FilteredNautobotModel): - """Diffsync model for ip address data.""" - _modelname = "ip_address" - _model = IPAddress - _identifiers = ( - "parent__namespace__name", - "host", +class NetworkImporterPrefix(FilteredNautobotModel): + """Shared data model representing a Prefix.""" + + _model = Prefix + _modelname = "prefix" + _identifiers = ("network", "namespace__name") + _attributes = ( + "prefix_length", + "status__name", ) - parent__namespace__name: str - host: str + network: str + namespace__name: str + + prefix_length: int + status__name: str + + @classmethod + def _get_queryset(cls, diffsync: "DiffSync"): + """Get the queryset used to load the models data from Nautobot.""" + prefixes = Prefix.objects.filter(namespace__name=diffsync.job.namespace.name) + return prefixes + + +# class 
NetworkImporterIPAddress(FilteredNautobotModel): +# """Shared data model representing an IPAddress.""" + +# _modelname = "ip_address" +# _model = IPAddress +# _identifiers = ("host",) +# _attributes = ("type", "ip_version", "mask_length", "status__name") + +# host: str + +# mask_length: int +# type: str +# ip_version: int +# status__name: str + +# @classmethod +# def _get_queryset(cls, diffsync: "DiffSync"): +# """Get the queryset used to load the models data from Nautobot.""" +# ip_addresses = IPAddress.objects.filter(parent__namespace__name=diffsync.job.namespace.name) +# return ip_addresses + + +class NetworkImporterIPAddressToInterface(DiffSyncModel): + """Shared data model representing an IPAddressToInterface.""" + + # _model = IPAddressToInterface + _modelname = "ipaddress_to_interface" + _identifiers = ("interface__device__name", "interface__name", "ip_address__host") + _attributes = ("ip_address__mask_length",) + + interface__device__name: str + interface__name: str + ip_address__host: str + ip_address__mask_length: str + + # @classmethod + # def create(cls, diffsync, ids, attrs): + # """ + # Do not attempt to assign interfaces that are not in the queryset of synced devices. 
+ # """ + # filter = {} + # if diffsync.job.devices: + # filter["id__in"] = [device.id for device in diffsync.job.devices] + # if diffsync.job.location: + # filter["location"] = diffsync.job.location + # if diffsync.job.device_role: + # filter["role"] = diffsync.job.device_role + # if diffsync.job.tag: + # filter["tags"] = diffsync.job.tag + # devices_in_sync = Device.objects.filter(**filter).values_list("name", flat=True) + + # try: + # device = Device.objects.get(name=ids["interface__device__name"]) + # if device.name in devices_in_sync: + # return super().create(diffsync, ids, attrs) + # else: + # return None + # except ObjectDoesNotExist: + # return None + + @classmethod + def _get_or_create_ip_address(cls, ids, attrs, diffsync): + """Attempt to get a Nautobot IP Address, create a new one if necessary.""" + ip_address = None + default_status = Status.objects.get(name="Active") + try: + ip_address = IPAddress.objects.get( + host=ids["ip_address__host"], + mask_length=attrs["ip_address__mask_length"], + parent__namespace=diffsync.job.namespace, + ) + except ObjectDoesNotExist: + try: + ip_address = IPAddress.objects.create( + address=f"{ids['ip_address__host']}/{attrs['ip_address__mask_length']}", + namespace=diffsync.job.namespace, + status=default_status, + ) + except ValidationError: + diffsync.job.logger.warning( + f"No suitable parent Prefix exists for IP {ids['hostt']} in " + f"Namespace {diffsync.job.namespace.name}, a new Prefix will be created." 
+ ) + new_prefix = ipaddress.ip_interface(f"{attrs['ip_address__host']}/{attrs['ip_address__mask_length']}").network + try: + prefix = Prefix.objects.get( + prefix=f"{new_prefix.network}", + namespace=diffsync.job.namespace, + ) + except ObjectDoesNotExist: + prefix = Prefix.objects.create( + prefix=f"{new_prefix.network}", + namespace=diffsync.job.namespace, + type=PrefixTypeChoices.TYPE_NETWORK, + status=default_status, + ) + ip_address = IPAddress.objects.create( + address=f"{ids['ip_address__host']}/{attrs['ip_address__mask_length']}", + status=default_status, + parent=prefix, + ) + return ip_address + + @classmethod + def create(cls, diffsync, ids, attrs): + """Create a new IPAddressToInterface object.""" + try: + interface = Interface.objects.get( + device__name=ids["interface__device__name"], + name=ids["interface__name"], + ) + ip_address_to_interface_obj = IPAddressToInterface( + interface=interface, + ip_address=cls._get_or_create_ip_address(ids, attrs, diffsync) + ) + ip_address_to_interface_obj.validated_save() + except ValidationError as err: + diffsync.job.logger.error(f"{ids} failed to create, {err}") + return super().create(diffsync, ids, attrs) + + def update(self, attrs): + """Update an existing IPAddressToInterface object.""" + ip_address_to_interface = IPAddressToInterface.objects.get(**self.get_identifiers()) + + if self.diffsync.job.debug: + self.diffsync.job.logger.debug(f"Updating {ip_address_to_interface} with attrs: {attrs}") + if attrs.get("ip_address__mask_length"): + ip_address = ip_address_to_interface.ip_address + ip_address.mask_length = attrs["ip_address__mask_length"] + try: + ip_address.validated_save() + except ValidationError as err: + self.job.logger.error(f"{ip_address} failed to create, {err}") + + return super().update(attrs) + + def delete(self): + """Delete an IPAddressToInterface object.""" + obj = self._model.objects.get(**self.get_identifiers()) + obj.delete() + return super().delete() + +# TODO: Vlan Model + +# TODO: 
Cable Model diff --git a/nautobot_device_onboarding/diffsync/models/onboarding_models.py b/nautobot_device_onboarding/diffsync/models/onboarding_models.py index 1f45ac83..03d8d995 100644 --- a/nautobot_device_onboarding/diffsync/models/onboarding_models.py +++ b/nautobot_device_onboarding/diffsync/models/onboarding_models.py @@ -116,7 +116,8 @@ def _get_or_create_ip_address(cls, diffsync, attrs): f"No suitable parent Prefix exists for IP {attrs['primary_ip4__host']} in " f"Namespace {diffsync.job.namespace.name}, a new Prefix will be created." ) - new_prefix = ipaddress.ip_interface(f"{attrs['primary_ip4__host']}/{attrs['mask_length']}") + # TODO: Test this implementation of new_prefix + new_prefix = ipaddress.ip_interface(f"{attrs['primary_ip4__host']}/{attrs['mask_length']}").network try: prefix = Prefix.objects.get( prefix=f"{new_prefix.network}", diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py index f64bc4c1..dbbd9809 100644 --- a/nautobot_device_onboarding/jobs.py +++ b/nautobot_device_onboarding/jobs.py @@ -8,6 +8,7 @@ from nautobot.apps.jobs import BooleanVar, IntegerVar, Job, MultiObjectVar, ObjectVar, StringVar from nautobot.core.celery import register_jobs from nautobot.dcim.models import Device, DeviceType, Location, Platform +from nautobot.ipam.models import Prefix from nautobot.extras.choices import SecretsGroupAccessTypeChoices, SecretsGroupSecretTypeChoices from nautobot.extras.models import Role, SecretsGroup, SecretsGroupAssociation, Status, Tag from nautobot.ipam.models import Namespace @@ -578,5 +579,5 @@ def run(self, *args, **kwargs): return final_result -jobs = [OnboardingTask, SSOTDeviceOnboarding, CommandGetterDO, CommandGetterNetworkImporter] +jobs = [OnboardingTask, SSOTDeviceOnboarding, SSOTNetworkImporter, CommandGetterDO, CommandGetterNetworkImporter] register_jobs(*jobs) From 41a1905a08858f6e64811b245315574b8c979217 Mon Sep 17 00:00:00 2001 From: David Cates Date: Wed, 7 Feb 2024 16:41:11 -0700 
Subject: [PATCH 043/225] update network importer ssot --- .../adapters/network_importer_adapters.py | 144 +++++------- .../diffsync/adapters/onboarding_adapters.py | 9 +- .../models/network_importer_models.py | 216 ++++++------------ .../diffsync/models/onboarding_models.py | 78 +++---- nautobot_device_onboarding/jobs.py | 36 ++- .../utils/diffsync_utils.py | 69 ++++++ 6 files changed, 258 insertions(+), 294 deletions(-) create mode 100644 nautobot_device_onboarding/utils/diffsync_utils.py diff --git a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py index aca64fd2..e419a27a 100644 --- a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py @@ -3,12 +3,12 @@ import ipaddress import diffsync +from diffsync.enum import DiffSyncModelFlags +from nautobot.ipam.models import IPAddress from nautobot_ssot.contrib import NautobotAdapter from netaddr import EUI, mac_unix_expanded from nautobot_device_onboarding.diffsync.models import network_importer_models -from nautobot.ipam.models import IPAddressToInterface -from nautobot.dcim.models import Device ####################################### # FOR TESTING ONLY - TO BE REMOVED # @@ -24,19 +24,24 @@ "ip_addresses": [ {"host": "10.1.1.8", "mask_length": 32}, ], - "mac_address": "d8b1.905c.5130", + "mac_address": "d8b1.905c.5170", "mtu": "1500", "description": "", "enabled": True, - "802.1Q_mode": "", + "802.1Q_mode": "Tagged (All)", "lag": "", + "untagged_vlan": {"name": "vlan60", "id": "60"}, + "tagged_vlans": [ + {"name": "vlan40", "id": "40"}, + {"name": "vlan50", "id": "50"} + ] }, "GigabitEthernet2": { "mgmt_only": False, "status": "Active", "type": "100base-tx", "ip_addresses": [ - {"host": "10.1.1.9", "mask_length": 32}, + {"host": "10.1.1.9", "mask_length": 24}, ], "mac_address": "d8b1.905c.6130", "mtu": "1500", @@ -44,14 
+49,16 @@ "enabled": True, "802.1Q_mode": "tagged-all", "lag": "Po1", + "untagged_vlan": "", + "tagged_vlans": [] }, "GigabitEthernet3": { "mgmt_only": False, "status": "Active", "type": "100base-tx", "ip_addresses": [ - {"host": "10.1.1.10", "mask_length": 32}, - {"host": "10.1.1.11", "mask_length": 30}, + {"host": "10.1.1.10", "mask_length": 24}, + {"host": "10.1.1.11", "mask_length": 22}, ], "mac_address": "d8b1.905c.6130", "mtu": "1500", @@ -59,13 +66,15 @@ "enabled": True, "802.1Q_mode": "", "lag": "", + "untagged_vlan": "", + "tagged_vlans": [] }, "GigabitEthernet4": { "mgmt_only": False, "status": "Active", "type": "100base-tx", "ip_addresses": [ - {"host": "10.1.1.12", "mask_length": 32}, + {"host": "10.1.1.12", "mask_length": 20}, ], "mac_address": "d8b1.905c.7130", "mtu": "1500", @@ -73,6 +82,8 @@ "enabled": True, "802.1Q_mode": "", "lag": "", + "untagged_vlan": "", + "tagged_vlans": [] }, "Po1": { "mgmt_only": False, @@ -85,6 +96,8 @@ "enabled": True, "802.1Q_mode": "", "lag": "", + "untagged_vlan": "", + "tagged_vlans": [] }, }, }, @@ -104,56 +117,43 @@ def _load_objects(self, diffsync_model): # pylint: disable=protected-access """Given a diffsync model class, load a list of models from the database and return them.""" parameter_names = self._get_parameter_names(diffsync_model) for database_object in diffsync_model._get_queryset(diffsync=self): # pylint: disable=protected-access - self.job.logger.debug( - f"LOADING: Database Object: {database_object}, " - f"Model Name: {diffsync_model._modelname}, " # pylint: disable=protected-access - f"Parameter Names: {parameter_names}" - ) self._load_single_object(database_object, diffsync_model, parameter_names) -# TODO: remove this if unused -class mac_unix_expanded_uppercase(mac_unix_expanded): - word_fmt = "%.2X" - - class NetworkImporterNautobotAdapter(FilteredNautobotAdapter): """Adapter for loading Nautobot data.""" device = network_importer_models.NetworkImporterDevice interface = 
network_importer_models.NetworkImporterInterface - # ip_address = network_importer_models.NetworkImporterIPAddress + ip_address = network_importer_models.NetworkImporterIPAddress ipaddress_to_interface = network_importer_models.NetworkImporterIPAddressToInterface - prefix = network_importer_models.NetworkImporterPrefix - top_level = ["prefix", "device", "ipaddress_to_interface"] + top_level = ["ip_address", "device", "ipaddress_to_interface"] - def load_ip_address_to_interfaces(self): - """ - Load the IP address to interface model into the DiffSync store. - - Only interfaces which belong to devices included in the sync should be considered. - """ - filter = {} - if self.job.devices: - filter["id__in"] = [device.id for device in self.job.devices] - if self.job.location: - filter["location"] = self.job.location - if self.job.device_role: - filter["role"] = self.job.device_role - if self.job.tag: - filter["tags"] = self.job.tag - devices_in_sync = Device.objects.filter(**filter) - - for obj in IPAddressToInterface.objects.filter(interface__device__in=devices_in_sync): - network_ip_address_to_interface = self.ipaddress_to_interface( + def load_param_mac_address(self, parameter_name, database_object): + """Convert interface mac_address to string""" + return str(database_object.mac_address) + + def load_ip_addresses(self): + """Load IP addresses into the DiffSync store.""" + for ip_address in IPAddress.objects.filter(parent__namespace__name=self.job.namespace.name): + network_ip_address = self.ip_address( diffsync=self, - interface__device__name=obj.interface.device.name, - interface__name=obj.interface.name, - ip_address__host=obj.ip_address.host, - ip_address__mask_length=obj.ip_address.mask_length, + host=ip_address.host, + mask_length=ip_address.mask_length, + type=ip_address.type, + ip_version=ip_address.ip_version, + status__name=ip_address.status.name, ) - self.add(network_ip_address_to_interface) + try: + network_ip_address.model_flags = 
DiffSyncModelFlags.SKIP_UNMATCHED_DST + self.add(network_ip_address) + if self.job.debug: + self.job.logger.debug(f"{network_ip_address} loaded.") + except diffsync.exceptions.ObjectAlreadyExists: + self.job.warning( + f"{network_ip_address} is already loaded to the " "DiffSync store. This is a duplicate IP Address." + ) def load(self): """Generic implementation of the load function.""" @@ -161,8 +161,8 @@ def load(self): raise ValueError("'top_level' needs to be set on the class.") for model_name in self.top_level: - if model_name is "ipaddress_to_interface": - self.load_ip_address_to_interfaces() + if model_name is "ip_address": + self.load_ip_addresses() else: diffsync_model = self._get_diffsync_class(model_name) @@ -171,6 +171,10 @@ def load(self): self._load_objects(diffsync_model) +class mac_unix_expanded_uppercase(mac_unix_expanded): + word_fmt = "%.2X" + + class NetworkImporterNetworkAdapter(diffsync.DiffSync): """Adapter for loading Network data.""" @@ -184,14 +188,17 @@ def __init__(self, *args, job, sync=None, **kwargs): device = network_importer_models.NetworkImporterDevice interface = network_importer_models.NetworkImporterInterface - # ip_address = network_importer_models.NetworkImporterIPAddress + ip_address = network_importer_models.NetworkImporterIPAddress ipaddress_to_interface = network_importer_models.NetworkImporterIPAddressToInterface - prefix = network_importer_models.NetworkImporterPrefix - top_level = ["prefix", "device", "ipaddress_to_interface"] + top_level = ["ip_address", "device", "ipaddress_to_interface"] device_data = mock_data + def _process_mac_address(self, mac_address): + """Convert a mac address to match the value stored by Nautobot.""" + return str(EUI(mac_address, version=48, dialect=mac_unix_expanded_uppercase)) + def load_devices(self): """Load device data from network devices.""" for hostname, device_data in self.device_data.items(): @@ -213,8 +220,7 @@ def load_interface(self, hostname, interface_name, interface_data): 
device__name=hostname, status__name=interface_data["status"], type=interface_data["type"], - # mac_address=interface_data["mac_address"], - # mac_address=EUI(interface_data["mac_address"], version=48, dialect=mac_unix_expanded_uppercase), + mac_address=self._process_mac_address(interface_data["mac_address"]), mtu=interface_data["mtu"], description=interface_data["description"], enabled=interface_data["enabled"], @@ -225,32 +231,8 @@ def load_interface(self, hostname, interface_name, interface_data): self.add(network_interface) return network_interface - def _determine_network(self, ip_address, mask_length): - ip_interface = ipaddress.ip_interface(f"{ip_address}/{mask_length}") - return str(ip_interface.network).split("/")[0] - - def load_prefixes(self): - """Load IP addresses used by interfaces into the DiffSync store.""" - for hostname, device_data in self.device_data.items(): - for interface_name, interface_data in device_data["interfaces"].items(): - for ip_address in interface_data["ip_addresses"]: - network_prefix = self.prefix( - diffsync=self, - namespace__name=self.job.namespace.name, - network=self._determine_network(ip_address=ip_address["host"], mask_length=24), - # TODO: prefix length is hard coded here, can it be determined from the device? 
- prefix_length=24, - status__name="Active", - ) - try: - self.add(network_prefix) - if self.job.debug: - self.job.logger.debug(f"{network_prefix} loaded.") - except diffsync.exceptions.ObjectAlreadyExists: - pass - def load_ip_addresses(self): - """Load IP addresses used by interfaces into the DiffSync store.""" + """Load IP addresses into the DiffSync store.""" for hostname, device_data in self.device_data.items(): for interface_name, interface_data in device_data["interfaces"].items(): for ip_address in interface_data["ip_addresses"]: @@ -260,12 +242,11 @@ def load_ip_addresses(self): mask_length=ip_address["mask_length"], type="host", ip_version=4, - status__name="Active", + status__name=self.job.ip_address_status.name, ) try: + network_ip_address.model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST self.add(network_ip_address) - if self.job.debug: - self.job.logger.debug(f"{network_ip_address} loaded.") except diffsync.exceptions.ObjectAlreadyExists: self.job.warning( f"{network_ip_address} is already loaded to the " @@ -290,7 +271,6 @@ def load_ip_address_to_interfaces(self): def load(self): """Load network data.""" - self.load_prefixes() - # self.load_ip_addresses() + self.load_ip_addresses() self.load_devices() self.load_ip_address_to_interfaces() diff --git a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py index 7115663a..840b0c35 100644 --- a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py @@ -14,7 +14,7 @@ # FOR TESTING ONLY - TO BE REMOVED # ####################################### mock_data = { - "10.1.1.8": { + "10.1.1.11": { "hostname": "demo-cisco-xe1", "serial": "9ABUXU581111", "device_type": "CSR1000V17", @@ -24,11 +24,11 @@ "network_driver": "cisco_ios", "mask_length": 16, }, - "10.1.1.9": { + "10.1.1.10": { "hostname": "demo-cisco-xe2", "serial": "9ABUXU5882222", 
"device_type": "CSR1000V2", - "mgmt_interface": "GigabitEthernet16", + "mgmt_interface": "GigabitEthernet5", "manufacturer": "Cisco", "platform": "IOS", "network_driver": "cisco_ios", @@ -100,8 +100,7 @@ def load_devices(self): if self.job.debug: self.job.logger.debug("Loading Device data from Nautobot...") - # for device in Device.objects.filter(primary_ip4__host__in=self.job.ip_addresses): - for device in Device.objects.all(): + for device in Device.objects.filter(primary_ip4__host__in=self.job.ip_addresses): interface_list = [] # Only interfaces with the device's primeary ip should be considered for diff calculations for interface in device.interfaces.all(): diff --git a/nautobot_device_onboarding/diffsync/models/network_importer_models.py b/nautobot_device_onboarding/diffsync/models/network_importer_models.py index 749f8cbd..06451fc7 100644 --- a/nautobot_device_onboarding/diffsync/models/network_importer_models.py +++ b/nautobot_device_onboarding/diffsync/models/network_importer_models.py @@ -6,12 +6,11 @@ from diffsync import DiffSync, DiffSyncModel from django.core.exceptions import ObjectDoesNotExist, ValidationError from nautobot.dcim.models import Device, Interface -from nautobot.ipam.models import IPAddress, IPAddressToInterface, Prefix -from nautobot_ssot.contrib import NautobotModel from nautobot.extras.models import Status -from nautobot.apps.choices import PrefixTypeChoices -from netaddr import EUI -import ipaddress +from nautobot.ipam.models import IPAddressToInterface, IPAddress +from nautobot_ssot.contrib import NautobotModel + +from nautobot_device_onboarding.utils import diffsync_utils class FilteredNautobotModel(NautobotModel): @@ -81,7 +80,8 @@ def create(cls, diffsync, ids, attrs): """ Do not create new devices. - Devices need to exist in Nautobot prior to syncing data. + Network devices need to exist in Nautobot prior to syncing data and + need to be included in the queryset generated based on job form inputs. 
""" diffsync.job.logger.error( f"{ids} is not included in the devices selected for syncing. " @@ -108,7 +108,7 @@ class NetworkImporterInterface(FilteredNautobotModel): _attributes = ( "status__name", "type", - # "mac_address", + "mac_address", "mtu", # "parent_interface__name", # "lag__name", @@ -118,73 +118,68 @@ class NetworkImporterInterface(FilteredNautobotModel): # untagged vlans, ) - # _children = {"ip_address": "ip_addresses"} - device__name: str name: str status__name: Optional[str] type: Optional[str] - mac_address: Optional[EUI] + mac_address: Optional[str] mtu: Optional[str] parent_interface__name: Optional[str] # lag__name: Optional[str] mode: Optional[str] mgmt_only: Optional[bool] - # ip_addresses: List["NetworkImporterIPAddress"] = [] +class NetworkImporterIPAddress(DiffSyncModel): + """Shared data model representing an IPAddress.""" -class NetworkImporterPrefix(FilteredNautobotModel): - """Shared data model representing a Prefix.""" - - _model = Prefix - _modelname = "prefix" - _identifiers = ("network", "namespace__name") - _attributes = ( - "prefix_length", - "status__name", - ) + _modelname = "ip_address" + _identifiers = ("host",) + _attributes = ("type", "ip_version", "mask_length", "status__name") - network: str - namespace__name: str + host: str - prefix_length: int + mask_length: int + type: str + ip_version: int status__name: str @classmethod - def _get_queryset(cls, diffsync: "DiffSync"): - """Get the queryset used to load the models data from Nautobot.""" - prefixes = Prefix.objects.filter(namespace__name=diffsync.job.namespace.name) - return prefixes - - -# class NetworkImporterIPAddress(FilteredNautobotModel): -# """Shared data model representing an IPAddress.""" - -# _modelname = "ip_address" -# _model = IPAddress -# _identifiers = ("host",) -# _attributes = ("type", "ip_version", "mask_length", "status__name") + def create(cls, diffsync, ids, attrs): + """Create a new IPAddressToInterface object.""" + 
diffsync_utils.get_or_create_ip_address( + host=ids["host"], + mask_length=attrs["mask_length"], + namespace=diffsync.job.namespace, + default_ip_status=diffsync.job.ip_address_status, + default_prefix_status=diffsync.job.default_prefix_status, + job=diffsync.job, + ) + return super().create(diffsync, ids, attrs) -# host: str + def update(self, attrs): + """Update an existing IPAddressToInterface object.""" + ip_address = IPAddress.objects.get(**self.get_identifiers()) -# mask_length: int -# type: str -# ip_version: int -# status__name: str + if self.diffsync.job.debug: + self.diffsync.job.logger.debug(f"Updating {self} with attrs: {attrs}") + if attrs.get("mask_length"): + ip_address.mask_length = attrs["mask_length"] + if attrs.get("status__name"): + ip_address.status = Status.objects.get(name=attrs["status__name"]) + try: + ip_address.validated_save() + except ValidationError as err: + self.job.logger.error(f"{self} failed to update, {err}") -# @classmethod -# def _get_queryset(cls, diffsync: "DiffSync"): -# """Get the queryset used to load the models data from Nautobot.""" -# ip_addresses = IPAddress.objects.filter(parent__namespace__name=diffsync.job.namespace.name) -# return ip_addresses + return super().update(attrs) -class NetworkImporterIPAddressToInterface(DiffSyncModel): +class NetworkImporterIPAddressToInterface(FilteredNautobotModel): """Shared data model representing an IPAddressToInterface.""" - # _model = IPAddressToInterface + _model = IPAddressToInterface _modelname = "ipaddress_to_interface" _identifiers = ("interface__device__name", "interface__name", "ip_address__host") _attributes = ("ip_address__mask_length",) @@ -194,113 +189,32 @@ class NetworkImporterIPAddressToInterface(DiffSyncModel): ip_address__host: str ip_address__mask_length: str - # @classmethod - # def create(cls, diffsync, ids, attrs): - # """ - # Do not attempt to assign interfaces that are not in the queryset of synced devices. 
- # """ - # filter = {} - # if diffsync.job.devices: - # filter["id__in"] = [device.id for device in diffsync.job.devices] - # if diffsync.job.location: - # filter["location"] = diffsync.job.location - # if diffsync.job.device_role: - # filter["role"] = diffsync.job.device_role - # if diffsync.job.tag: - # filter["tags"] = diffsync.job.tag - # devices_in_sync = Device.objects.filter(**filter).values_list("name", flat=True) - - # try: - # device = Device.objects.get(name=ids["interface__device__name"]) - # if device.name in devices_in_sync: - # return super().create(diffsync, ids, attrs) - # else: - # return None - # except ObjectDoesNotExist: - # return None - - @classmethod - def _get_or_create_ip_address(cls, ids, attrs, diffsync): - """Attempt to get a Nautobot IP Address, create a new one if necessary.""" - ip_address = None - default_status = Status.objects.get(name="Active") - try: - ip_address = IPAddress.objects.get( - host=ids["ip_address__host"], - mask_length=attrs["ip_address__mask_length"], - parent__namespace=diffsync.job.namespace, - ) - except ObjectDoesNotExist: - try: - ip_address = IPAddress.objects.create( - address=f"{ids['ip_address__host']}/{attrs['ip_address__mask_length']}", - namespace=diffsync.job.namespace, - status=default_status, - ) - except ValidationError: - diffsync.job.logger.warning( - f"No suitable parent Prefix exists for IP {ids['hostt']} in " - f"Namespace {diffsync.job.namespace.name}, a new Prefix will be created." 
- ) - new_prefix = ipaddress.ip_interface(f"{attrs['ip_address__host']}/{attrs['ip_address__mask_length']}").network - try: - prefix = Prefix.objects.get( - prefix=f"{new_prefix.network}", - namespace=diffsync.job.namespace, - ) - except ObjectDoesNotExist: - prefix = Prefix.objects.create( - prefix=f"{new_prefix.network}", - namespace=diffsync.job.namespace, - type=PrefixTypeChoices.TYPE_NETWORK, - status=default_status, - ) - ip_address = IPAddress.objects.create( - address=f"{ids['ip_address__host']}/{attrs['ip_address__mask_length']}", - status=default_status, - parent=prefix, - ) - return ip_address - @classmethod def create(cls, diffsync, ids, attrs): - """Create a new IPAddressToInterface object.""" - try: - interface = Interface.objects.get( - device__name=ids["interface__device__name"], - name=ids["interface__name"], - ) - ip_address_to_interface_obj = IPAddressToInterface( - interface=interface, - ip_address=cls._get_or_create_ip_address(ids, attrs, diffsync) - ) - ip_address_to_interface_obj.validated_save() - except ValidationError as err: - diffsync.job.logger.error(f"{ids} failed to create, {err}") - return super().create(diffsync, ids, attrs) + """ + Do not attempt to assign IP addresses to interfaces that are not in the queryset of synced devices. 
+ """ + filter = {} + if diffsync.job.devices: + filter["id__in"] = [device.id for device in diffsync.job.devices] + if diffsync.job.location: + filter["location"] = diffsync.job.location + if diffsync.job.device_role: + filter["role"] = diffsync.job.device_role + if diffsync.job.tag: + filter["tags"] = diffsync.job.tag + devices_in_sync = Device.objects.filter(**filter).values_list("name", flat=True) - def update(self, attrs): - """Update an existing IPAddressToInterface object.""" - ip_address_to_interface = IPAddressToInterface.objects.get(**self.get_identifiers()) + try: + device = Device.objects.get(name=ids["interface__device__name"]) + if device.name in devices_in_sync: + return super().create(diffsync, ids, attrs) + else: + return None + except ObjectDoesNotExist: + return None - if self.diffsync.job.debug: - self.diffsync.job.logger.debug(f"Updating {ip_address_to_interface} with attrs: {attrs}") - if attrs.get("ip_address__mask_length"): - ip_address = ip_address_to_interface.ip_address - ip_address.mask_length = attrs["ip_address__mask_length"] - try: - ip_address.validated_save() - except ValidationError as err: - self.job.logger.error(f"{ip_address} failed to create, {err}") - return super().update(attrs) - - def delete(self): - """Delete an IPAddressToInterface object.""" - obj = self._model.objects.get(**self.get_identifiers()) - obj.delete() - return super().delete() - # TODO: Vlan Model # TODO: Cable Model diff --git a/nautobot_device_onboarding/diffsync/models/onboarding_models.py b/nautobot_device_onboarding/diffsync/models/onboarding_models.py index 03d8d995..2b8cdbb6 100644 --- a/nautobot_device_onboarding/diffsync/models/onboarding_models.py +++ b/nautobot_device_onboarding/diffsync/models/onboarding_models.py @@ -1,16 +1,16 @@ """Diffsync models.""" -import ipaddress from typing import Optional from diffsync import DiffSyncModel from django.core.exceptions import MultipleObjectsReturned, ObjectDoesNotExist, ValidationError -from 
nautobot.apps.choices import InterfaceTypeChoices, PrefixTypeChoices +from nautobot.apps.choices import InterfaceTypeChoices from nautobot.dcim.models import Device, DeviceType, Interface, Manufacturer, Platform from nautobot.extras.models import Role, SecretsGroup, Status -from nautobot.ipam.models import IPAddress, Prefix from nautobot_ssot.contrib import NautobotModel +from nautobot_device_onboarding.utils import diffsync_utils + class OnboardingDevice(DiffSyncModel): """Diffsync model for device data.""" @@ -94,49 +94,6 @@ def _get_or_create_device(cls, platform, diffsync, ids, attrs): device.validated_save() return device - @classmethod - def _get_or_create_ip_address(cls, diffsync, attrs): - """Attempt to get a Nautobot IP Address, create a new one if necessary.""" - ip_address = None - try: - ip_address = IPAddress.objects.get( - host=attrs["primary_ip4__host"], - mask_length=attrs["mask_length"], - parent__namespace=diffsync.job.namespace, - ) - except ObjectDoesNotExist: - try: - ip_address = IPAddress.objects.create( - address=f"{attrs['primary_ip4__host']}/{attrs['mask_length']}", - namespace=diffsync.job.namespace, - status=diffsync.job.ip_address_status, - ) - except ValidationError: - diffsync.job.logger.warning( - f"No suitable parent Prefix exists for IP {attrs['primary_ip4__host']} in " - f"Namespace {diffsync.job.namespace.name}, a new Prefix will be created." 
- ) - # TODO: Test this implementation of new_prefix - new_prefix = ipaddress.ip_interface(f"{attrs['primary_ip4__host']}/{attrs['mask_length']}").network - try: - prefix = Prefix.objects.get( - prefix=f"{new_prefix.network}", - namespace=diffsync.job.namespace, - ) - except ObjectDoesNotExist: - prefix = Prefix.objects.create( - prefix=f"{new_prefix.network}", - namespace=diffsync.job.namespace, - type=PrefixTypeChoices.TYPE_NETWORK, - status=diffsync.job.ip_address_status, - ) - ip_address, _ = IPAddress.objects.get_or_create( - address=f"{attrs['primary_ip4__host']}/{attrs['mask_length']}", - status=diffsync.job.ip_address_status, - parent=prefix, - ) - return ip_address - @classmethod def _get_or_create_interface(cls, diffsync, device, attrs): """Attempt to get a Device Interface, create a new one if necessary.""" @@ -185,7 +142,14 @@ def create(cls, diffsync, ids, attrs): # Get or create Device, Interface and IP Address device = cls._get_or_create_device(platform, diffsync, ids, attrs) if device: - ip_address = cls._get_or_create_ip_address(diffsync=diffsync, attrs=attrs) + ip_address = diffsync_utils.get_or_create_ip_address( + host=attrs["primary_ip4__host"], + mask_length=attrs["mask_length"], + namespace=diffsync.job.namespace, + default_ip_status=diffsync.job.ip_address_status, + default_prefix_status=diffsync.job.ip_address_status, + job=diffsync.job, + ) interface = cls._get_or_create_interface(diffsync=diffsync, device=device, attrs=attrs) interface.ip_addresses.add(ip_address) interface.validated_save() @@ -229,7 +193,15 @@ def update(self, attrs): # If the primary ip address is being updated, the mask length must be included if not attrs.get("mask_length"): attrs["mask_length"] = device.primary_ip4.mask_length - ip_address = self._get_or_create_ip_address(diffsync=self.diffsync, attrs=attrs) + + ip_address = diffsync_utils.get_or_create_ip_address( + host=attrs["primary_ip4__host"], + mask_length=attrs["mask_length"], + 
namespace=self.diffsync.job.namespace, + default_ip_status=self.diffsync.job.ip_address_status, + default_prefix_status=self.diffsync.job.ip_address_status, + job=self.diffsync.job, + ) interface.ip_addresses.add(ip_address) interface.validated_save() # set the new ip address as the device primary ip address @@ -265,7 +237,15 @@ def update(self, attrs): if attrs.get("primary_ip4__host"): if not attrs.get("mask_length"): attrs["mask_length"] = device.primary_ip4.mask_length - ip_address = self._get_or_create_ip_address(diffsync=self.diffsync, attrs=attrs) + + ip_address = diffsync_utils.get_or_create_ip_address( + host=attrs["primary_ip4__host"], + mask_length=attrs["mask_length"], + namespace=self.diffsync.job.namespace, + default_ip_status=self.diffsync.job.ip_address_status, + default_prefix_status=self.diffsync.job.ip_address_status, + job=self.diffsync.job, + ) interface = Interface.objects.get( device=device, ip_addresses__in=[device.primary_ip4], name=self.get_attrs()["interfaces"][0] ) diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py index dbbd9809..2de838b2 100644 --- a/nautobot_device_onboarding/jobs.py +++ b/nautobot_device_onboarding/jobs.py @@ -8,7 +8,6 @@ from nautobot.apps.jobs import BooleanVar, IntegerVar, Job, MultiObjectVar, ObjectVar, StringVar from nautobot.core.celery import register_jobs from nautobot.dcim.models import Device, DeviceType, Location, Platform -from nautobot.ipam.models import Prefix from nautobot.extras.choices import SecretsGroupAccessTypeChoices, SecretsGroupSecretTypeChoices from nautobot.extras.models import Role, SecretsGroup, SecretsGroupAssociation, Status, Tag from nautobot.ipam.models import Namespace @@ -355,11 +354,6 @@ def run( class SSOTNetworkImporter(DataSource): # pylint: disable=too-many-instance-attributes """Job syncing extended device attributes into Nautobot.""" - # def __init__(self): - # """Initialize SSOTDeviceOnboarding.""" - # super().__init__() - # 
self.diffsync_flags = DiffSyncFlags.SKIP_UNMATCHED_DST - class Meta: # pylint: disable=too-few-public-methods """Metadata about this Job.""" @@ -373,6 +367,20 @@ class Meta: # pylint: disable=too-few-public-methods namespace = ObjectVar( model=Namespace, required=True, description="The namespace for all IP addresses created or updated in the sync." ) + ip_address_status = ObjectVar( + label="IP address status", + model=Status, + query_params={"content_types": "ipam.ipaddress"}, + required=True, + description="Status to be applied to all synced IP addresses. This will update existing IP address statuses", + ) + default_prefix_status = ObjectVar( + label="Prefix status", + model=Status, + query_params={"content_types": "ipam.prefix"}, + required=True, + description="Status to be applied to all new created prefixes. This value does not update with additional syncs.", + ) devices = MultiObjectVar( model=Device, required=False, @@ -408,13 +416,27 @@ def load_target_adapter(self): self.target_adapter.load() def run( - self, dryrun, memory_profiling, debug, namespace, location, devices, device_role, tag, *args, **kwargs + self, + dryrun, + memory_profiling, + debug, + namespace, + ip_address_status, + default_prefix_status, + location, + devices, + device_role, + tag, + *args, + **kwargs ): # pylint:disable=arguments-differ, disable=too-many-arguments """Run sync.""" self.dryrun = dryrun self.memory_profiling = memory_profiling self.debug = debug self.namespace = namespace + self.ip_address_status = ip_address_status + self.default_prefix_status = default_prefix_status self.location = location self.devices = devices self.device_role = device_role diff --git a/nautobot_device_onboarding/utils/diffsync_utils.py b/nautobot_device_onboarding/utils/diffsync_utils.py new file mode 100644 index 00000000..afb11aca --- /dev/null +++ b/nautobot_device_onboarding/utils/diffsync_utils.py @@ -0,0 +1,69 @@ +"""Utility functions for use with diffsync.""" + +import ipaddress + +from 
django.core.exceptions import ObjectDoesNotExist, ValidationError +from nautobot.apps.choices import PrefixTypeChoices +from nautobot.extras.models import Status +from nautobot.ipam.models import IPAddress, Prefix + + +def get_or_create_prefix(host, mask_length, default_status, namespace, job=None): + """Attempt to get a Nautobot Prefix, create a new one if necessary.""" + prefix = None + new_network = ipaddress.ip_interface(f"{host}/{mask_length}") + try: + prefix = Prefix.objects.get( + prefix=f"{new_network.network}", + namespace=namespace, + ) + except ObjectDoesNotExist: + prefix = Prefix( + prefix=f"{new_network.network}", + namespace=namespace, + type=PrefixTypeChoices.TYPE_NETWORK, + status=default_status, + ) + try: + prefix.validated_save() + except ValidationError as err: + if job: + job.logger.error(f"Prefix {host} failed to create, {err}") + return prefix + + +def get_or_create_ip_address(host, mask_length, namespace, default_ip_status, default_prefix_status, job=None): + """Attempt to get a Nautobot IPAddress, create a new one if necessary.""" + ip_address = None + default_status = Status.objects.get(name="Active") + try: + ip_address = IPAddress.objects.get( + host=host, + parent__namespace=namespace, + ) + except ObjectDoesNotExist: + try: + ip_address = IPAddress( + address=f"{host}/{mask_length}", + namespace=namespace, + status=default_ip_status, + ) + ip_address.validated_save() + except ValidationError: + if job: + job.logger.warning( + f"No suitable parent Prefix exists for IP {host} in " + f"Namespace {namespace.name}, a new Prefix will be created." 
+ ) + prefix = get_or_create_prefix(host, mask_length, default_prefix_status, namespace, job) + ip_address = IPAddress.objects.create( + address=f"{host}/{mask_length}", + status=default_ip_status, + parent=prefix, + ) + try: + ip_address.validated_save() + except ValidationError as err: + if job: + job.logger.error(f"IP Address {host} failed to create, {err}") + return ip_address From 62144a264f848fbd551f56819040254614168753 Mon Sep 17 00:00:00 2001 From: Susan Hooks Date: Thu, 8 Feb 2024 18:50:49 +0000 Subject: [PATCH 044/225] update network importer --- nautobot_device_onboarding/jobs.py | 78 +++++++++++++--------- nautobot_device_onboarding/utils/helper.py | 56 ++++++++++++++++ 2 files changed, 102 insertions(+), 32 deletions(-) create mode 100644 nautobot_device_onboarding/utils/helper.py diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py index 2de838b2..79efa506 100644 --- a/nautobot_device_onboarding/jobs.py +++ b/nautobot_device_onboarding/jobs.py @@ -13,6 +13,7 @@ from nautobot.ipam.models import Namespace from nautobot_ssot.jobs.base import DataSource from nornir import InitNornir +from nornir.core.task import Result, Task from nornir.core.plugins.inventory import InventoryPluginRegister from nautobot_device_onboarding.diffsync.adapters.network_importer_adapters import ( @@ -31,15 +32,22 @@ from nautobot_device_onboarding.nornir_plays.logger import NornirLogger from nautobot_device_onboarding.nornir_plays.processor import ProcessorDO from nautobot_device_onboarding.utils.inventory_creator import _set_inventory +from nautobot_device_onboarding.utils.helper import get_job_filter +from nautobot_device_onboarding.constants import PLATFORM_COMMAND_MAP +from nautobot_plugin_nornir.plugins.inventory.nautobot_orm import NautobotORMInventory +from nautobot_plugin_nornir.constants import NORNIR_SETTINGS +from nornir_nautobot.exceptions import NornirNautobotException + +from nornir_netmiko import netmiko_send_command + 
+InventoryPluginRegister.register("nautobot-inventory", NautobotORMInventory) InventoryPluginRegister.register("empty-inventory", EmptyInventory) PLUGIN_SETTINGS = settings.PLUGINS_CONFIG["nautobot_device_onboarding"] -NORNIR_SETTINGS = settings.PLUGINS_CONFIG["nautobot_plugin_nornir"] - LOGGER = logging.getLogger(__name__) - +COMMANDS = [] name = "Device Onboarding/Network Importer" # pylint: disable=invalid-name @@ -416,19 +424,19 @@ def load_target_adapter(self): self.target_adapter.load() def run( - self, - dryrun, - memory_profiling, - debug, + self, + dryrun, + memory_profiling, + debug, namespace, ip_address_status, - default_prefix_status, - location, - devices, - device_role, - tag, - *args, - **kwargs + default_prefix_status, + location, + devices, + device_role, + tag, + *args, + **kwargs, ): # pylint:disable=arguments-differ, disable=too-many-arguments """Run sync.""" self.dryrun = dryrun @@ -523,6 +531,9 @@ class CommandGetterNetworkImporter(Job): """Simple Job to Execute Show Command.""" debug = BooleanVar(description="Enable for more verbose logging.") + namespace = ObjectVar( + model=Namespace, required=True, description="The namespace for all IP addresses created or updated in the sync." 
+ ) devices = MultiObjectVar( model=Device, required=False, @@ -546,6 +557,8 @@ class CommandGetterNetworkImporter(Job): required=False, description="Only update devices with the selected tag.", ) + port = IntegerVar(default=22) + timeout = IntegerVar(default=30) class Meta: # pylint: disable=too-few-public-methods """Meta object boilerplate for onboarding.""" @@ -565,40 +578,41 @@ def _process_result(self, command_result, ip_addresses): f"Processed CommandGetterNetworkImporter return for {ip_address}: {command_result[ip_address]}" ) return processed_device_data - + def run(self, *args, **kwargs): """Process onboarding task from ssot-ni job.""" - self.ip_addresses = kwargs["ip_addresses"].replace(" ", "").split(",") - self.port = kwargs["port"] - self.timeout = kwargs["timeout"] - self.secrets_group = kwargs["secrets_group"] - self.platform = kwargs["platform"] - - # Initiate Nornir instance with empty inventory try: - logger = NornirLogger(self.job_result, log_level=0) - compiled_results = {} + qs = get_job_filter(kwargs) with InitNornir( runner=NORNIR_SETTINGS.get("runner"), logging={"enabled": False}, inventory={ "plugin": "nautobot-inventory", + "options": { + "credentials_class": NORNIR_SETTINGS.get("credentials"), + "queryset": qs, + }, }, ) as nornir_obj: - nr_with_processors = nornir_obj.with_processors([ProcessorDO(logger, compiled_results)]) - nr_with_processors.run(task=netmiko_send_commands) + + result = nornir_obj.run(task=netmiko_send_command, command_string="show interfaces", use_textfsm=True) + # for _, data in nornir_obj.inventory.hosts.items(): + # platform = data.dict().get("platform") + # interfaces = nornir_obj.run(task=netmiko_send_command, command_string="show interfaces", use_textfsm=True) + # vlans = nornir_obj.run(task=netmiko_send_command, command_string="show vlan", use_textfsm=True) - final_result = self._process_result(compiled_results, self.ip_addresses) + for _, data in nornir_obj.inventory.hosts.items(): + ip_address = 
data.dict().get("hostname") + formatted_data = {ip_address: {}} - # Remove before final merge # - for host, data in nr_with_processors.inventory.hosts.items(): - self.logger.info("%s;\n%s", host, data.dict()) - # End # + print(f"Data Dict: {data.dict()}") + print(f"Result: {result[list(result.keys())[0]].result}") except Exception as err: # pylint: disable=broad-exception-caught self.logger.info("Error: %s", err) return err - return final_result + + return formatted_data jobs = [OnboardingTask, SSOTDeviceOnboarding, SSOTNetworkImporter, CommandGetterDO, CommandGetterNetworkImporter] diff --git a/nautobot_device_onboarding/utils/helper.py b/nautobot_device_onboarding/utils/helper.py new file mode 100644 index 00000000..760d934c --- /dev/null +++ b/nautobot_device_onboarding/utils/helper.py @@ -0,0 +1,56 @@ +from nautobot.dcim.filters import DeviceFilterSet +from nautobot.dcim.models import Device +from django.db.models import Q +from nornir_nautobot.exceptions import NornirNautobotException + +FIELDS_PK = { + "location", + "role", +} + +FIELDS_NAME = {"tags"} + + +def get_job_filter(data=None): + """Helper function to return a the filterable list of OS's based on platform.name and a specific custom value.""" + + if not data: + data = {} + query = {} + + # Translate instances from FIELDS set to list of primary keys + for field in FIELDS_PK: + if data.get(field): + query[field] = data[field].values_list("pk", flat=True) + + # Translate instances from FIELDS set to list of names + for field in FIELDS_NAME: + if data.get(field): + query[field] = data[field].values_list("name", flat=True) + + # Handle case where object is from single device run all. 
+ if data.get("device") and isinstance(data["device"], Device): + query.update({"id": [str(data["device"].pk)]}) + elif data.get("device"): + query.update({"id": data["device"].values_list("pk", flat=True)}) + raw_qs = Q() + base_qs = Device.objects.filter(name="BRPLS3") + + if not base_qs.exists(): + raise NornirNautobotException( + "`E3015:` The base queryset didn't find any devices. Please check the Golden Config Setting scope." + ) + + devices_filtered = DeviceFilterSet(data=query, queryset=base_qs) + + if not devices_filtered.qs.exists(): + raise NornirNautobotException( + "`E3016:` The provided job parameters didn't match any devices detected by the Golden Config scope. Please check the scope defined within Golden Config Settings or select the correct job parameters to correctly match devices." + ) + devices_no_platform = devices_filtered.qs.filter(platform__isnull=True) + if devices_no_platform.exists(): + raise NornirNautobotException( + f"`E3017:` The following device(s) {', '.join([device.name for device in devices_no_platform])} have no platform defined. Platform is required." 
+ ) + + return devices_filtered.qs \ No newline at end of file From 4a8464e00543248ae97ae62367dfad8847fba527 Mon Sep 17 00:00:00 2001 From: Susan Hooks Date: Thu, 8 Feb 2024 21:52:15 +0000 Subject: [PATCH 045/225] update command getter --- nautobot_device_onboarding/jobs.py | 36 ++++++++++++++++++------------ 1 file changed, 22 insertions(+), 14 deletions(-) diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py index 79efa506..5bb574ad 100644 --- a/nautobot_device_onboarding/jobs.py +++ b/nautobot_device_onboarding/jobs.py @@ -594,25 +594,33 @@ def run(self, *args, **kwargs): }, }, ) as nornir_obj: - - result = nornir_obj.run(task=netmiko_send_command, command_string="show interfaces", use_textfsm=True) - # for _, data in nornir_obj.inventory.hosts.items(): - # platform = data.dict().get("platform") - # interfaces = nornir_obj.run(task=netmiko_send_command, command_string="show interfaces", use_textfsm=True) - # vlans = nornir_obj.run(task=netmiko_send_command, command_string="show vlan", use_textfsm=True) - - for _, data in nornir_obj.inventory.hosts.items(): - ip_address = data.dict().get("hostname") - formatted_data = {ip_address: {}} - - print(f"Data Dict: {data.dict()}") - print(f"Result: {result[list(result.keys())[0]].result}") + commands = ["show interfaces", "show vlan"] + all_results = {} + formatted_data = {} + + for command in commands: + command_result = nornir_obj.run(task=netmiko_send_command, command_string=command, use_textfsm=True) + + for host_name, result in command_result.items(): + if host_name not in all_results: + all_results[host_name] = {"interfaces": {}} + + if command == "show interfaces": + for interface_info in result.result: + interface_name = interface_info.get("interface") + mtu = interface_info.get("mtu") + # Store the interface name and MTU + all_results[host_name]["interfaces"][interface_name] = {"mtu": mtu} + elif command == "show vlan": + # Example: Process "show vlan" command result differently + 
# Update `all_results` accordingly based on your needs + pass except Exception as err: # pylint: disable=broad-exception-caught self.logger.info("Error: %s", err) return err - return formatted_data + return all_results jobs = [OnboardingTask, SSOTDeviceOnboarding, SSOTNetworkImporter, CommandGetterDO, CommandGetterNetworkImporter] From 7f4a4392027ce56b83a292e2fd36dcdbe73c250c Mon Sep 17 00:00:00 2001 From: Jeff Kala Date: Thu, 8 Feb 2024 17:15:29 -0600 Subject: [PATCH 046/225] fix filtering on NI job --- nautobot_device_onboarding/utils/helper.py | 29 ++++++++-------------- 1 file changed, 10 insertions(+), 19 deletions(-) diff --git a/nautobot_device_onboarding/utils/helper.py b/nautobot_device_onboarding/utils/helper.py index 760d934c..8dc0580f 100644 --- a/nautobot_device_onboarding/utils/helper.py +++ b/nautobot_device_onboarding/utils/helper.py @@ -1,6 +1,5 @@ from nautobot.dcim.filters import DeviceFilterSet from nautobot.dcim.models import Device -from django.db.models import Q from nornir_nautobot.exceptions import NornirNautobotException FIELDS_PK = { @@ -13,7 +12,6 @@ def get_job_filter(data=None): """Helper function to return a the filterable list of OS's based on platform.name and a specific custom value.""" - if not data: data = {} query = {} @@ -21,31 +19,24 @@ def get_job_filter(data=None): # Translate instances from FIELDS set to list of primary keys for field in FIELDS_PK: if data.get(field): - query[field] = data[field].values_list("pk", flat=True) + query[field] = [str(data[field].id)] # Translate instances from FIELDS set to list of names for field in FIELDS_NAME: if data.get(field): - query[field] = data[field].values_list("name", flat=True) - + query[field] = [str(data[field].id)] # Handle case where object is from single device run all. 
- if data.get("device") and isinstance(data["device"], Device): - query.update({"id": [str(data["device"].pk)]}) - elif data.get("device"): - query.update({"id": data["device"].values_list("pk", flat=True)}) - raw_qs = Q() - base_qs = Device.objects.filter(name="BRPLS3") - - if not base_qs.exists(): - raise NornirNautobotException( - "`E3015:` The base queryset didn't find any devices. Please check the Golden Config Setting scope." - ) - + if data.get("devices") and isinstance(data["devices"], Device): + query.update({"id": [str(data["devices"].pk)]}) + elif data.get("devices"): + query.update({"id": data["devices"].values_list("pk", flat=True)}) + base_qs = Device.objects.all() + # {'debug': False, 'namespace': , 'devices': ]>, 'location': None, 'device_role': None, 'tag': None, 'port': 22, 'timeout': 30} devices_filtered = DeviceFilterSet(data=query, queryset=base_qs) if not devices_filtered.qs.exists(): raise NornirNautobotException( - "`E3016:` The provided job parameters didn't match any devices detected by the Golden Config scope. Please check the scope defined within Golden Config Settings or select the correct job parameters to correctly match devices." + "`E3016:` The provided job parameters didn't match any devices detected. Please select the correct job parameters to correctly match devices." ) devices_no_platform = devices_filtered.qs.filter(platform__isnull=True) if devices_no_platform.exists(): @@ -53,4 +44,4 @@ def get_job_filter(data=None): f"`E3017:` The following device(s) {', '.join([device.name for device in devices_no_platform])} have no platform defined. Platform is required." 
) - return devices_filtered.qs \ No newline at end of file + return devices_filtered.qs From acdf5ef7bd53a07c4461ddf1d2bb54ef640ca5df Mon Sep 17 00:00:00 2001 From: David Cates Date: Thu, 8 Feb 2024 16:59:12 -0700 Subject: [PATCH 047/225] add vlan model and adapter load method --- .../adapters/network_importer_adapters.py | 89 +++++++++++++++---- .../diffsync/adapters/onboarding_adapters.py | 8 +- .../models/network_importer_models.py | 77 +++++++++++----- nautobot_device_onboarding/jobs.py | 21 ++++- 4 files changed, 155 insertions(+), 40 deletions(-) diff --git a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py index e419a27a..e67d4cb1 100644 --- a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py @@ -7,6 +7,9 @@ from nautobot.ipam.models import IPAddress from nautobot_ssot.contrib import NautobotAdapter from netaddr import EUI, mac_unix_expanded +from nautobot.dcim.models import Device +from nautobot.ipam.models import VLAN +from django.core.exceptions import ObjectDoesNotExist from nautobot_device_onboarding.diffsync.models import network_importer_models @@ -28,13 +31,10 @@ "mtu": "1500", "description": "", "enabled": True, - "802.1Q_mode": "Tagged (All)", + "802.1Q_mode": "tagged-all", "lag": "", "untagged_vlan": {"name": "vlan60", "id": "60"}, - "tagged_vlans": [ - {"name": "vlan40", "id": "40"}, - {"name": "vlan50", "id": "50"} - ] + "tagged_vlans": [{"name": "vlan40", "id": "40"}, {"name": "vlan50", "id": "50"}], }, "GigabitEthernet2": { "mgmt_only": False, @@ -47,10 +47,10 @@ "mtu": "1500", "description": "uplink Po1", "enabled": True, - "802.1Q_mode": "tagged-all", + "802.1Q_mode": "", "lag": "Po1", "untagged_vlan": "", - "tagged_vlans": [] + "tagged_vlans": [], }, "GigabitEthernet3": { "mgmt_only": False, @@ -67,7 +67,7 @@ "802.1Q_mode": "", "lag": "", 
"untagged_vlan": "", - "tagged_vlans": [] + "tagged_vlans": [], }, "GigabitEthernet4": { "mgmt_only": False, @@ -83,7 +83,7 @@ "802.1Q_mode": "", "lag": "", "untagged_vlan": "", - "tagged_vlans": [] + "tagged_vlans": [], }, "Po1": { "mgmt_only": False, @@ -97,7 +97,7 @@ "802.1Q_mode": "", "lag": "", "untagged_vlan": "", - "tagged_vlans": [] + "tagged_vlans": [], }, }, }, @@ -127,8 +127,9 @@ class NetworkImporterNautobotAdapter(FilteredNautobotAdapter): interface = network_importer_models.NetworkImporterInterface ip_address = network_importer_models.NetworkImporterIPAddress ipaddress_to_interface = network_importer_models.NetworkImporterIPAddressToInterface + vlan = network_importer_models.NetworkImporterVLAN - top_level = ["ip_address", "device", "ipaddress_to_interface"] + top_level = ["ip_address", "vlan", "device", "ipaddress_to_interface"] def load_param_mac_address(self, parameter_name, database_object): """Convert interface mac_address to string""" @@ -155,6 +156,24 @@ def load_ip_addresses(self): f"{network_ip_address} is already loaded to the " "DiffSync store. This is a duplicate IP Address." ) + def load_vlans(self): + """Load vlans into the Diffsync store.""" + for vlan in VLAN.objects.all(): + network_vlan = self.vlan( + diffsync=self, + name=vlan.name, + vid=vlan.vid, + location__name=vlan.location.name if vlan.location else "", + ) + try: + network_vlan.model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST + self.add(network_vlan) + except diffsync.exceptions.ObjectAlreadyExists: + self.job.warning( + f"VLAN {vlan} is already loaded to the DiffSync store. " + "Vlans must have a unique combinaation of id, name and location." 
+ ) + def load(self): """Generic implementation of the load function.""" if not hasattr(self, "top_level") or not self.top_level: @@ -163,6 +182,8 @@ def load(self): for model_name in self.top_level: if model_name is "ip_address": self.load_ip_addresses() + elif model_name is "vlan": + self.load_vlans() else: diffsync_model = self._get_diffsync_class(model_name) @@ -190,8 +211,9 @@ def __init__(self, *args, job, sync=None, **kwargs): interface = network_importer_models.NetworkImporterInterface ip_address = network_importer_models.NetworkImporterIPAddress ipaddress_to_interface = network_importer_models.NetworkImporterIPAddressToInterface + vlan = network_importer_models.NetworkImporterVLAN - top_level = ["ip_address", "device", "ipaddress_to_interface"] + top_level = ["ip_address", "vlan", "device", "ipaddress_to_interface"] device_data = mock_data @@ -200,7 +222,7 @@ def _process_mac_address(self, mac_address): return str(EUI(mac_address, version=48, dialect=mac_unix_expanded_uppercase)) def load_devices(self): - """Load device data from network devices.""" + """Load devices into the DiffSync store.""" for hostname, device_data in self.device_data.items(): network_device = self.device(diffsync=self, name=hostname, serial=device_data["serial"]) self.add(network_device) @@ -213,7 +235,7 @@ def load_devices(self): self.job.logger.debug(f"Interface {network_interface} loaded.") def load_interface(self, hostname, interface_name, interface_data): - """Load data for a single interface into the DiffSync store.""" + """Load an interface into the DiffSync store.""" network_interface = self.interface( diffsync=self, name=interface_name, @@ -227,6 +249,7 @@ def load_interface(self, hostname, interface_name, interface_data): mode=interface_data["802.1Q_mode"], mgmt_only=interface_data["mgmt_only"], lag=interface_data["lag"], + untagged_vlan__name=interface_data["untagged_vlan"]["name"] if interface_data["untagged_vlan"] else None, ) self.add(network_interface) return 
network_interface @@ -254,7 +277,7 @@ def load_ip_addresses(self): ) def load_ip_address_to_interfaces(self): - """Load the IP address to interface model into the DiffSync store.""" + """Load ip address interface assignments into the Diffsync store.""" for hostname, device_data in self.device_data.items(): for interface_name, interface_data in device_data["interfaces"].items(): for ip_address in interface_data["ip_addresses"]: @@ -269,8 +292,44 @@ def load_ip_address_to_interfaces(self): if self.job.debug: self.job.logger.debug(f"{network_ip_address_to_interface} loaded.") + def load_vlans(self): + """Load vlans into the Diffsync store.""" + location_names = {} + for device in self.job.filtered_devices: + location_names[device.name] = device.location.name + + for hostname, device_data in self.device_data.items(): + for interface_name, interface_data in device_data["interfaces"].items(): + # add untagged vlans + for tagged_vlan in interface_data["tagged_vlans"]: + network_vlan = self.vlan( + diffsync=self, + name=tagged_vlan["name"], + vid=tagged_vlan["id"], + location__name=location_names.get(hostname, ""), + ) + try: + network_vlan.model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST + self.add(network_vlan) + except diffsync.exceptions.ObjectAlreadyExists: + pass + # check for untagged vlan and add if necessary + if interface_data["untagged_vlan"]: + network_vlan = self.vlan( + diffsync=self, + name=interface_data["untagged_vlan"]["name"], + vid=interface_data["untagged_vlan"]["id"], + location__name=location_names.get(hostname, ""), + ) + try: + network_vlan.model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST + self.add(network_vlan) + except diffsync.exceptions.ObjectAlreadyExists: + pass + def load(self): """Load network data.""" self.load_ip_addresses() + self.load_vlans() self.load_devices() self.load_ip_address_to_interfaces() diff --git a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py 
b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py index 840b0c35..09b15028 100644 --- a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py @@ -212,7 +212,7 @@ def execute_command_getter(self): self._handle_failed_connections(device_data=result.result) def load_manufacturers(self): - """Load manufacturer data into a DiffSync model.""" + """Load manufacturers into the DiffSync store.""" for ip_address in self.device_data: if self.job.debug: self.job.logger.debug(f"loading manufacturer data for {ip_address}") @@ -226,7 +226,7 @@ def load_manufacturers(self): pass def load_platforms(self): - """Load platform data into a DiffSync model.""" + """Load platforms into the DiffSync store.""" for ip_address in self.device_data: if self.job.debug: self.job.logger.debug(f"loading platform data for {ip_address}") @@ -242,7 +242,7 @@ def load_platforms(self): pass def load_device_types(self): - """Load device type data into a DiffSync model.""" + """Load device types into the DiffSync store.""" for ip_address in self.device_data: if self.job.debug: self.job.logger.debug(f"loading device_type data for {ip_address}") @@ -258,7 +258,7 @@ def load_device_types(self): pass def load_devices(self): - """Load device data into a DiffSync model.""" + """Load devices into the DiffSync store.""" for ip_address in self.device_data: if self.job.debug: self.job.logger.debug(f"loading device data for {ip_address}") diff --git a/nautobot_device_onboarding/diffsync/models/network_importer_models.py b/nautobot_device_onboarding/diffsync/models/network_importer_models.py index 06451fc7..425d506a 100644 --- a/nautobot_device_onboarding/diffsync/models/network_importer_models.py +++ b/nautobot_device_onboarding/diffsync/models/network_importer_models.py @@ -5,13 +5,15 @@ from diffsync import DiffSync, DiffSyncModel from django.core.exceptions import ObjectDoesNotExist, ValidationError -from 
nautobot.dcim.models import Device, Interface +from nautobot.dcim.models import Device, Interface, Location from nautobot.extras.models import Status -from nautobot.ipam.models import IPAddressToInterface, IPAddress +from nautobot.ipam.models import IPAddressToInterface, IPAddress, VLAN from nautobot_ssot.contrib import NautobotModel from nautobot_device_onboarding.utils import diffsync_utils +from nautobot.dcim.choices import InterfaceModeChoices + class FilteredNautobotModel(NautobotModel): """ @@ -57,20 +59,27 @@ class NetworkImporterDevice(FilteredNautobotModel): @classmethod def _get_queryset(cls, diffsync: "DiffSync"): """Get the queryset used to load the models data from Nautobot.""" - filter = {} - - if diffsync.job.devices: - filter["id__in"] = [device.id for device in diffsync.job.devices] - if diffsync.job.location: - filter["location"] = diffsync.job.location - if diffsync.job.device_role: - filter["role"] = diffsync.job.device_role - if diffsync.job.tag: - filter["tags"] = diffsync.job.tag - filtered_qs = cls._model.objects.filter(**filter) - - if filter: - return filtered_qs + # TODO: this fitter has been moved to the job, remove if not used + # filter = {} + + # if diffsync.job.devices: + # filter["id__in"] = [device.id for device in diffsync.job.devices] + # if diffsync.job.location: + # filter["location"] = diffsync.job.location + # if diffsync.job.device_role: + # filter["role"] = diffsync.job.device_role + # if diffsync.job.tag: + # filter["tags"] = diffsync.job.tag + # filtered_qs = cls._model.objects.filter(**filter) + + # if filter: + # return filtered_qs + # else: + # diffsync.job.logger.error("No device filter options were provided, no devices will be synced.") + # return cls._model.objects.none() + + if diffsync.job.filtered_devices: + return diffsync.job.filtered_devices else: diffsync.job.logger.error("No device filter options were provided, no devices will be synced.") return cls._model.objects.none() @@ -92,7 +101,7 @@ def create(cls, 
diffsync, ids, attrs): def delete(self): """Delete the ORM object corresponding to this diffsync object.""" - self.job.logger.error(f"{self} will not be deleted.") + self.diffsync.job.logger.error(f"{self} will not be deleted.") return super().delete() @@ -114,8 +123,8 @@ class NetworkImporterInterface(FilteredNautobotModel): # "lag__name", "mode", "mgmt_only", - # tagged vlan, - # untagged vlans, + # "tagged_vlans", + "untagged_vlan__name", ) device__name: str @@ -129,6 +138,7 @@ class NetworkImporterInterface(FilteredNautobotModel): # lag__name: Optional[str] mode: Optional[str] mgmt_only: Optional[bool] + untagged_vlan__name: Optional[str] class NetworkImporterIPAddress(DiffSyncModel): @@ -215,6 +225,33 @@ def create(cls, diffsync, ids, attrs): return None -# TODO: Vlan Model +class NetworkImporterVLAN(DiffSyncModel): + """Shared data model representing a VLAN.""" + + _model = VLAN + _modelname = "vlan" + _identifiers = ("vid", "name", "location__name") + + vid: int + name: str + location__name: str + + @classmethod + def create(cls, diffsync, ids, attrs): + try: + vlan = VLAN( + name=ids["name"], + vid=ids["vid"], + location=Location.objects.get( + name=ids["location__name"] + ), # TODO: This will fail if multiple locaitons are returned. 
+ status=Status.objects.get(name="Active"), # TODO: this can't be hardcoded, add a form input + ) + vlan.validated_save() + except ValidationError as err: + diffsync.job.logger.error(f"VLAN {vlan} failed to create, {err}") + + return super().create(diffsync, ids, attrs) + # TODO: Cable Model diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py index 5bb574ad..7b26f27e 100644 --- a/nautobot_device_onboarding/jobs.py +++ b/nautobot_device_onboarding/jobs.py @@ -362,6 +362,11 @@ def run( class SSOTNetworkImporter(DataSource): # pylint: disable=too-many-instance-attributes """Job syncing extended device attributes into Nautobot.""" + def __init__(self): + """Initialize SSOTNetworkImporter.""" + super().__init__() + self.filtered_devices = None + class Meta: # pylint: disable=too-few-public-methods """Metadata about this Job.""" @@ -450,8 +455,22 @@ def run( self.device_role = device_role self.tag = tag + # Filter devices based on form input + device_filter = {} + if self.devices: + device_filter["id__in"] = [device.id for device in devices] + if self.location: + device_filter["location"] = location + if self.device_role: + device_filter["role"] = device_role + if self.tag: + device_filter["tags"] = tag + self.filtered_devices = Device.objects.filter(**device_filter) + self.job_result.task_kwargs = { "debug": debug, + "ip_address_status": ip_address_status, + "default_prefix_status": default_prefix_status, "location": location, "devices": devices, "device_role": device_role, @@ -578,7 +597,7 @@ def _process_result(self, command_result, ip_addresses): f"Processed CommandGetterNetworkImporter return for {ip_address}: {command_result[ip_address]}" ) return processed_device_data - + def run(self, *args, **kwargs): """Process onboarding task from ssot-ni job.""" try: From 9b5085b5186183dc1ee283a6f259fcfaf3b615a3 Mon Sep 17 00:00:00 2001 From: David Cates Date: Thu, 8 Feb 2024 22:56:25 -0700 Subject: [PATCH 048/225] add vlans and interface lag 
to sync --- .../adapters/network_importer_adapters.py | 176 +++++++++++++---- .../models/network_importer_models.py | 185 +++++++++++++----- nautobot_device_onboarding/jobs.py | 23 +-- 3 files changed, 281 insertions(+), 103 deletions(-) diff --git a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py index e67d4cb1..e7f32f65 100644 --- a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py @@ -1,15 +1,11 @@ """DiffSync adapters.""" -import ipaddress - import diffsync from diffsync.enum import DiffSyncModelFlags -from nautobot.ipam.models import IPAddress +from nautobot.dcim.models import Interface +from nautobot.ipam.models import VLAN, IPAddress from nautobot_ssot.contrib import NautobotAdapter from netaddr import EUI, mac_unix_expanded -from nautobot.dcim.models import Device -from nautobot.ipam.models import VLAN -from django.core.exceptions import ObjectDoesNotExist from nautobot_device_onboarding.diffsync.models import network_importer_models @@ -31,10 +27,10 @@ "mtu": "1500", "description": "", "enabled": True, - "802.1Q_mode": "tagged-all", + "802.1Q_mode": "tagged", "lag": "", "untagged_vlan": {"name": "vlan60", "id": "60"}, - "tagged_vlans": [{"name": "vlan40", "id": "40"}, {"name": "vlan50", "id": "50"}], + "tagged_vlans": [{"name": "vlan40", "id": "40"}], }, "GigabitEthernet2": { "mgmt_only": False, @@ -48,7 +44,7 @@ "description": "uplink Po1", "enabled": True, "802.1Q_mode": "", - "lag": "Po1", + "lag": "Po2", "untagged_vlan": "", "tagged_vlans": [], }, @@ -64,10 +60,10 @@ "mtu": "1500", "description": "", "enabled": True, - "802.1Q_mode": "", - "lag": "", + "802.1Q_mode": "tagged", + "lag": "Po1", "untagged_vlan": "", - "tagged_vlans": [], + "tagged_vlans": [{"name": "vlan40", "id": "40"}, {"name": "vlan50", "id": "50"}], }, "GigabitEthernet4": { 
"mgmt_only": False, @@ -90,7 +86,21 @@ "status": "Active", "type": "lag", "ip_addresses": [], - "mac_address": "d8b1.905c.8130", + "mac_address": "d8b1.905c.8131", + "mtu": "1500", + "description": "", + "enabled": True, + "802.1Q_mode": "", + "lag": "", + "untagged_vlan": "", + "tagged_vlans": [], + }, + "Po2": { + "mgmt_only": False, + "status": "Active", + "type": "lag", + "ip_addresses": [], + "mac_address": "d8b1.905c.8132", "mtu": "1500", "description": "", "enabled": True, @@ -128,8 +138,17 @@ class NetworkImporterNautobotAdapter(FilteredNautobotAdapter): ip_address = network_importer_models.NetworkImporterIPAddress ipaddress_to_interface = network_importer_models.NetworkImporterIPAddressToInterface vlan = network_importer_models.NetworkImporterVLAN - - top_level = ["ip_address", "vlan", "device", "ipaddress_to_interface"] + tagged_vlans_to_interface = network_importer_models.NetworkImporterTaggedVlansToInterface + lag_to_interface = network_importer_models.NetworkImporterLagToInterface + + top_level = [ + "ip_address", + "vlan", + "device", + "ipaddress_to_interface", + "tagged_vlans_to_interface", + "lag_to_interface", + ] def load_param_mac_address(self, parameter_name, database_object): """Convert interface mac_address to string""" @@ -174,6 +193,35 @@ def load_vlans(self): "Vlans must have a unique combinaation of id, name and location." 
) + def load_tagged_vlans_to_interface(self): + """Load a model representing tagged vlan assignments to the Diffsync store.""" + for interface in Interface.objects.filter(device__in=self.job.filtered_devices): + tagged_vlans = [] + for vlan in interface.tagged_vlans.all(): + vlan_dict = {} + vlan_dict["name"] = vlan.name + vlan_dict["id"] = str(vlan.vid) + tagged_vlans.append(vlan_dict) + + network_tagged_vlans_to_interface = self.tagged_vlans_to_interface( + diffsync=self, + device__name=interface.device.name, + name=interface.name, + tagged_vlans=tagged_vlans, + ) + self.add(network_tagged_vlans_to_interface) + + def load_lag_to_interface(self): + """Load a model representing lag assignments to the Diffsync store.""" + for interface in Interface.objects.filter(device__in=self.job.filtered_devices): + network_lag_to_interface = self.lag_to_interface( + diffsync=self, + device__name=interface.device.name, + name=interface.name, + lag__interface__name=interface.lag.name if interface.lag else None, + ) + self.add(network_lag_to_interface) + def load(self): """Generic implementation of the load function.""" if not hasattr(self, "top_level") or not self.top_level: @@ -183,12 +231,15 @@ def load(self): if model_name is "ip_address": self.load_ip_addresses() elif model_name is "vlan": - self.load_vlans() + if self.job.sync_vlans: + self.load_vlans() + elif model_name is "tagged_vlans_to_interface": + if self.job.sync_vlans: + self.load_tagged_vlans_to_interface() + elif model_name is "lag_to_interface": + self.load_lag_to_interface() else: diffsync_model = self._get_diffsync_class(model_name) - - # This function directly mutates the diffsync store, i.e. it will create and load the objects - # for this specific model class as well as its children without returning anything. 
self._load_objects(diffsync_model) @@ -212,8 +263,17 @@ def __init__(self, *args, job, sync=None, **kwargs): ip_address = network_importer_models.NetworkImporterIPAddress ipaddress_to_interface = network_importer_models.NetworkImporterIPAddressToInterface vlan = network_importer_models.NetworkImporterVLAN - - top_level = ["ip_address", "vlan", "device", "ipaddress_to_interface"] + tagged_vlans_to_interface = network_importer_models.NetworkImporterTaggedVlansToInterface + lag_to_interface = network_importer_models.NetworkImporterLagToInterface + + top_level = [ + "ip_address", + "vlan", + "device", + "ipaddress_to_interface", + "tagged_vlans_to_interface", + "lag_to_interface", + ] device_data = mock_data @@ -248,10 +308,11 @@ def load_interface(self, hostname, interface_name, interface_data): enabled=interface_data["enabled"], mode=interface_data["802.1Q_mode"], mgmt_only=interface_data["mgmt_only"], - lag=interface_data["lag"], untagged_vlan__name=interface_data["untagged_vlan"]["name"] if interface_data["untagged_vlan"] else None, ) self.add(network_interface) + if self.job.debug: + self.job.logger.debug(f"Interface {network_interface} loaded.") return network_interface def load_ip_addresses(self): @@ -270,28 +331,14 @@ def load_ip_addresses(self): try: network_ip_address.model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST self.add(network_ip_address) + if self.job.debug: + self.job.logger.debug(f"{network_ip_address} loaded.") except diffsync.exceptions.ObjectAlreadyExists: self.job.warning( f"{network_ip_address} is already loaded to the " "DiffSync store. This is a duplicate IP Address." 
) - def load_ip_address_to_interfaces(self): - """Load ip address interface assignments into the Diffsync store.""" - for hostname, device_data in self.device_data.items(): - for interface_name, interface_data in device_data["interfaces"].items(): - for ip_address in interface_data["ip_addresses"]: - network_ip_address_to_interface = self.ipaddress_to_interface( - diffsync=self, - interface__device__name=hostname, - interface__name=interface_name, - ip_address__host=ip_address["host"], - ip_address__mask_length=ip_address["mask_length"], - ) - self.add(network_ip_address_to_interface) - if self.job.debug: - self.job.logger.debug(f"{network_ip_address_to_interface} loaded.") - def load_vlans(self): """Load vlans into the Diffsync store.""" location_names = {} @@ -300,7 +347,7 @@ def load_vlans(self): for hostname, device_data in self.device_data.items(): for interface_name, interface_data in device_data["interfaces"].items(): - # add untagged vlans + # add tagged vlans for tagged_vlan in interface_data["tagged_vlans"]: network_vlan = self.vlan( diffsync=self, @@ -311,6 +358,8 @@ def load_vlans(self): try: network_vlan.model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST self.add(network_vlan) + if self.job.debug: + self.job.logger.debug(f"tagged vlan {network_vlan} loaded.") except diffsync.exceptions.ObjectAlreadyExists: pass # check for untagged vlan and add if necessary @@ -324,12 +373,59 @@ def load_vlans(self): try: network_vlan.model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST self.add(network_vlan) + if self.job.debug: + self.job.logger.debug(f"untagged vlan {network_vlan} loaded.") except diffsync.exceptions.ObjectAlreadyExists: pass + def load_ip_address_to_interfaces(self): + """Load ip address interface assignments into the Diffsync store.""" + for hostname, device_data in self.device_data.items(): + for interface_name, interface_data in device_data["interfaces"].items(): + for ip_address in interface_data["ip_addresses"]: + 
network_ip_address_to_interface = self.ipaddress_to_interface( + diffsync=self, + interface__device__name=hostname, + interface__name=interface_name, + ip_address__host=ip_address["host"], + ip_address__mask_length=ip_address["mask_length"], + ) + self.add(network_ip_address_to_interface) + if self.job.debug: + self.job.logger.debug(f"{network_ip_address_to_interface} loaded.") + + def load_tagged_vlans_to_interface(self): + for hostname, device_data in self.device_data.items(): + for interface_name, interface_data in device_data["interfaces"].items(): + network_tagged_vlans_to_interface = self.tagged_vlans_to_interface( + diffsync=self, + device__name=hostname, + name=interface_name, + tagged_vlans=interface_data["tagged_vlans"], + ) + self.add(network_tagged_vlans_to_interface) + + def load_lag_to_interface(self): + for hostname, device_data in self.device_data.items(): + for interface_name, interface_data in device_data["interfaces"].items(): + network_lag_to_interface = self.lag_to_interface( + diffsync=self, + device__name=hostname, + name=interface_name, + lag__interface__name=interface_data["lag"] if interface_data["lag"] else None, + ) + self.add(network_lag_to_interface) + def load(self): """Load network data.""" + #TODO: Function for comparing incoming hostnames to nautobot hostnames loaded for sync. + # remove missing hostnames from nautobot side of the sync (self.job.filtered_devices). 
+ self.load_ip_addresses() - self.load_vlans() + if self.job.sync_vlans: + self.load_vlans() self.load_devices() self.load_ip_address_to_interfaces() + if self.job.sync_vlans: + self.load_tagged_vlans_to_interface() + self.load_lag_to_interface() diff --git a/nautobot_device_onboarding/diffsync/models/network_importer_models.py b/nautobot_device_onboarding/diffsync/models/network_importer_models.py index 425d506a..5596ca4e 100644 --- a/nautobot_device_onboarding/diffsync/models/network_importer_models.py +++ b/nautobot_device_onboarding/diffsync/models/network_importer_models.py @@ -5,15 +5,14 @@ from diffsync import DiffSync, DiffSyncModel from django.core.exceptions import ObjectDoesNotExist, ValidationError +from nautobot.dcim.choices import InterfaceTypeChoices from nautobot.dcim.models import Device, Interface, Location from nautobot.extras.models import Status -from nautobot.ipam.models import IPAddressToInterface, IPAddress, VLAN +from nautobot.ipam.models import VLAN, IPAddress, IPAddressToInterface from nautobot_ssot.contrib import NautobotModel from nautobot_device_onboarding.utils import diffsync_utils -from nautobot.dcim.choices import InterfaceModeChoices - class FilteredNautobotModel(NautobotModel): """ @@ -59,25 +58,6 @@ class NetworkImporterDevice(FilteredNautobotModel): @classmethod def _get_queryset(cls, diffsync: "DiffSync"): """Get the queryset used to load the models data from Nautobot.""" - # TODO: this fitter has been moved to the job, remove if not used - # filter = {} - - # if diffsync.job.devices: - # filter["id__in"] = [device.id for device in diffsync.job.devices] - # if diffsync.job.location: - # filter["location"] = diffsync.job.location - # if diffsync.job.device_role: - # filter["role"] = diffsync.job.device_role - # if diffsync.job.tag: - # filter["tags"] = diffsync.job.tag - # filtered_qs = cls._model.objects.filter(**filter) - - # if filter: - # return filtered_qs - # else: - # diffsync.job.logger.error("No device filter options 
were provided, no devices will be synced.") - # return cls._model.objects.none() - if diffsync.job.filtered_devices: return diffsync.job.filtered_devices else: @@ -93,9 +73,9 @@ def create(cls, diffsync, ids, attrs): need to be included in the queryset generated based on job form inputs. """ diffsync.job.logger.error( - f"{ids} is not included in the devices selected for syncing. " - "This device either does not exist in Nautobot or was not " - "included based on filter criteria provided on the job form." + f"Network device {ids} is not included in the Nautobot devices " + "selected for syncing. This device either does not exist in Nautobot " + "or was not included based on filter criteria provided on the job form." ) return None @@ -120,10 +100,8 @@ class NetworkImporterInterface(FilteredNautobotModel): "mac_address", "mtu", # "parent_interface__name", - # "lag__name", "mode", "mgmt_only", - # "tagged_vlans", "untagged_vlan__name", ) @@ -135,7 +113,7 @@ class NetworkImporterInterface(FilteredNautobotModel): mac_address: Optional[str] mtu: Optional[str] parent_interface__name: Optional[str] - # lag__name: Optional[str] + lag__name: Optional[str] mode: Optional[str] mgmt_only: Optional[bool] untagged_vlan__name: Optional[str] @@ -200,29 +178,9 @@ class NetworkImporterIPAddressToInterface(FilteredNautobotModel): ip_address__mask_length: str @classmethod - def create(cls, diffsync, ids, attrs): - """ - Do not attempt to assign IP addresses to interfaces that are not in the queryset of synced devices. 
- """ - filter = {} - if diffsync.job.devices: - filter["id__in"] = [device.id for device in diffsync.job.devices] - if diffsync.job.location: - filter["location"] = diffsync.job.location - if diffsync.job.device_role: - filter["role"] = diffsync.job.device_role - if diffsync.job.tag: - filter["tags"] = diffsync.job.tag - devices_in_sync = Device.objects.filter(**filter).values_list("name", flat=True) - - try: - device = Device.objects.get(name=ids["interface__device__name"]) - if device.name in devices_in_sync: - return super().create(diffsync, ids, attrs) - else: - return None - except ObjectDoesNotExist: - return None + def _get_queryset(cls, diffsync: "DiffSync"): + """Get the queryset used to load the models data from Nautobot.""" + return IPAddressToInterface.objects.filter(interface__device__in=diffsync.job.filtered_devices) class NetworkImporterVLAN(DiffSyncModel): @@ -238,13 +196,14 @@ class NetworkImporterVLAN(DiffSyncModel): @classmethod def create(cls, diffsync, ids, attrs): + """Create a new VLAN""" try: vlan = VLAN( name=ids["name"], vid=ids["vid"], location=Location.objects.get( name=ids["location__name"] - ), # TODO: This will fail if multiple locaitons are returned. + ), # TODO: This will fail if multiple locations are returned. 
status=Status.objects.get(name="Active"), # TODO: this can't be hardcoded, add a form input ) vlan.validated_save() @@ -254,4 +213,126 @@ def create(cls, diffsync, ids, attrs): return super().create(diffsync, ids, attrs) +class NetworkImporterTaggedVlansToInterface(DiffSyncModel): + """Shared data model representing a TaggedVlanToInterface.""" + + _modelname = "tagged_vlans_to_interface" + _identifiers = ("device__name", "name") + _attributes = ("tagged_vlans",) + + device__name: str + name: str + + tagged_vlans: Optional[list] + + #TODO: move the create and update method locgic to a single utility function + @classmethod + def create(cls, diffsync, ids, attrs): + """Assign tagged vlans to an interface.""" + interface = Interface.objects.get(device__name=ids["device__name"], name=ids["name"]) + + for network_vlan in attrs["tagged_vlans"]: + try: + nautobot_vlan = VLAN.objects.get( + name=network_vlan["name"], vid=network_vlan["id"], location=interface.device.location + ) + interface.tagged_vlans.add(nautobot_vlan) + except ObjectDoesNotExist: + diffsync.job.logger.error( + f"Failed to assign tagged vlan to {interface}, unable to locate a vlan " + f"with attributes [name: {network_vlan['name']}, vid: {network_vlan['id']} " + f"location: {interface.device.location}]" + ) + try: + interface.validated_save() + except ValidationError as err: + diffsync.job.logger.error( + f"Failed to assign tagged vlans {attrs['tagged_vlans']} to {interface} on {interface.device}, {err}" + ) + return super().create(diffsync, ids, attrs) + + def update(self, attrs): + interface = Interface.objects.get(**self.get_identifiers()) + interface.tagged_vlans.clear() + + for network_vlan in attrs["tagged_vlans"]: + try: + nautobot_vlan = VLAN.objects.get( + name=network_vlan["name"], vid=network_vlan["id"], location=interface.device.location + ) + interface.tagged_vlans.add(nautobot_vlan) + except ObjectDoesNotExist: + self.diffsync.job.logger.error( + f"Failed to assign tagged vlan to 
{interface}, unable to locate a vlan " + f"with attributes [name: {network_vlan['name']}, vid: {network_vlan['id']} " + f"location: {interface.device.location}]" + ) + try: + interface.validated_save() + except ValidationError as err: + self.diffsync.job.logger.error( + f"Failed to assign tagged vlans {attrs['tagged_vlans']} to {interface} on {interface.device}, {err}" + ) + + return super().update(attrs) + + +class NetworkImporterLagToInterface(DiffSyncModel): + """Shared data model representing a LagToInterface""" + + _modelname = "lag_to_interface" + _identifiers = ("device__name", "name") + _attributes = ("lag__interface__name",) + + device__name: str + name: str + + lag__interface__name: Optional[str] + + #TODO: move the create and update method locgic to a single utility function + @classmethod + def create(cls, diffsync, ids, attrs): + """Assign tagged vlans to an interface.""" + if attrs["lag__interface__name"]: # Prevent the sync from attempting to assign lag interface names of 'None' + interface = Interface.objects.get(device__name=ids["device__name"], name=ids["name"]) + try: + lag_interface = Interface.objects.get( + name=attrs["lag__interface__name"], device=interface.device, type=InterfaceTypeChoices.TYPE_LAG + ) + interface.lag = lag_interface + interface.validated_save() + except ObjectDoesNotExist: + diffsync.job.logger.error( + f"Failed to assign lag to {interface}, unable to locate a lag interface " + f"with attributes [name: {attrs['lag__interface__name']}, device: {interface.device.name} " + f"type: {InterfaceTypeChoices.TYPE_LAG}]" + ) + except ValidationError as err: + diffsync.job.logger.error( + f"Failed to assign lag {lag_interface} to {interface} on {interface.device}, {err}" + ) + return super().create(diffsync, ids, attrs) + + def update(self, attrs): + interface = Interface.objects.get(**self.get_identifiers()) + try: + lag_interface = Interface.objects.get( + name=attrs["lag__interface__name"], device=interface.device, 
type=InterfaceTypeChoices.TYPE_LAG + ) + interface.lag = lag_interface + interface.validated_save() + except ObjectDoesNotExist: + self.diffsync.job.logger.error( + f"Failed to assign lag to {interface}, unable to locate a lag interface " + f"with attributes [name: {attrs['lag__interface__name']}, device: {interface.device.name} " + f"type: {InterfaceTypeChoices.TYPE_LAG}]" + ) + except ValidationError as err: + self.diffsync.job.logger.error( + f"Failed to assign lag {lag_interface} to {interface} on {interface.device}, {err}" + ) + + return super().update(attrs) + + # TODO: Cable Model diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py index 7b26f27e..c6573cb2 100644 --- a/nautobot_device_onboarding/jobs.py +++ b/nautobot_device_onboarding/jobs.py @@ -11,11 +11,16 @@ from nautobot.extras.choices import SecretsGroupAccessTypeChoices, SecretsGroupSecretTypeChoices from nautobot.extras.models import Role, SecretsGroup, SecretsGroupAssociation, Status, Tag from nautobot.ipam.models import Namespace +from nautobot_plugin_nornir.constants import NORNIR_SETTINGS +from nautobot_plugin_nornir.plugins.inventory.nautobot_orm import NautobotORMInventory from nautobot_ssot.jobs.base import DataSource from nornir import InitNornir -from nornir.core.task import Result, Task from nornir.core.plugins.inventory import InventoryPluginRegister +from nornir.core.task import Result, Task +from nornir_nautobot.exceptions import NornirNautobotException +from nornir_netmiko import netmiko_send_command +from nautobot_device_onboarding.constants import PLATFORM_COMMAND_MAP from nautobot_device_onboarding.diffsync.adapters.network_importer_adapters import ( NetworkImporterNautobotAdapter, NetworkImporterNetworkAdapter, @@ -31,15 +36,8 @@ from nautobot_device_onboarding.nornir_plays.empty_inventory import EmptyInventory from nautobot_device_onboarding.nornir_plays.logger import NornirLogger from nautobot_device_onboarding.nornir_plays.processor import 
ProcessorDO -from nautobot_device_onboarding.utils.inventory_creator import _set_inventory from nautobot_device_onboarding.utils.helper import get_job_filter -from nautobot_device_onboarding.constants import PLATFORM_COMMAND_MAP - -from nautobot_plugin_nornir.plugins.inventory.nautobot_orm import NautobotORMInventory -from nautobot_plugin_nornir.constants import NORNIR_SETTINGS -from nornir_nautobot.exceptions import NornirNautobotException - -from nornir_netmiko import netmiko_send_command +from nautobot_device_onboarding.utils.inventory_creator import _set_inventory InventoryPluginRegister.register("nautobot-inventory", NautobotORMInventory) InventoryPluginRegister.register("empty-inventory", EmptyInventory) @@ -377,6 +375,7 @@ class Meta: # pylint: disable=too-few-public-methods ) debug = BooleanVar(description="Enable for more verbose logging.") + sync_vlans = BooleanVar(default=True, description="Sync VLANs and interface VLAN assignments.") namespace = ObjectVar( model=Namespace, required=True, description="The namespace for all IP addresses created or updated in the sync." ) @@ -388,11 +387,10 @@ class Meta: # pylint: disable=too-few-public-methods description="Status to be applied to all synced IP addresses. This will update existing IP address statuses", ) default_prefix_status = ObjectVar( - label="Prefix status", model=Status, query_params={"content_types": "ipam.prefix"}, required=True, - description="Status to be applied to all new created prefixes. This value does not update with additional syncs.", + description="Status to be applied to all new created prefixes. 
Prefix status does not update with additional syncs.", ) devices = MultiObjectVar( model=Device, @@ -439,6 +437,7 @@ def run( location, devices, device_role, + sync_vlans, tag, *args, **kwargs, @@ -454,6 +453,7 @@ def run( self.devices = devices self.device_role = device_role self.tag = tag + self.sync_vlans = sync_vlans # Filter devices based on form input device_filter = {} @@ -475,6 +475,7 @@ def run( "devices": devices, "device_role": device_role, "tag": tag, + "sync_vlans": sync_vlans } super().run(dryrun, memory_profiling, *args, **kwargs) From 7375db6250345e8e63416808eca44f669eb81dd2 Mon Sep 17 00:00:00 2001 From: Susan Hooks Date: Fri, 9 Feb 2024 22:26:59 +0000 Subject: [PATCH 049/225] updates for NI --- nautobot_device_onboarding/jobs.py | 114 ++++++++++++++---- nautobot_device_onboarding/utils/formatter.py | 69 +++++++++++ 2 files changed, 161 insertions(+), 22 deletions(-) diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py index c6573cb2..2c3f7c51 100644 --- a/nautobot_device_onboarding/jobs.py +++ b/nautobot_device_onboarding/jobs.py @@ -38,6 +38,7 @@ from nautobot_device_onboarding.nornir_plays.processor import ProcessorDO from nautobot_device_onboarding.utils.helper import get_job_filter from nautobot_device_onboarding.utils.inventory_creator import _set_inventory +from nautobot_device_onboarding.utils.formatter import normalize_interface_name InventoryPluginRegister.register("nautobot-inventory", NautobotORMInventory) InventoryPluginRegister.register("empty-inventory", EmptyInventory) @@ -475,7 +476,7 @@ def run( "devices": devices, "device_role": device_role, "tag": tag, - "sync_vlans": sync_vlans + "sync_vlans": sync_vlans, } super().run(dryrun, memory_profiling, *args, **kwargs) @@ -550,6 +551,43 @@ def run(self, *args, **kwargs): class CommandGetterNetworkImporter(Job): """Simple Job to Execute Show Command.""" + mock_job_data = { + "demo-cisco-xe1": { + "serial": "9ABUXU581111", + "interfaces": { + 
"GigabitEthernet1": { + "type": "100base-tx", + "ip_addresses": [ + {"host": "10.1.1.8", "mask_length": 32}, + ], + "mac_address": "d8b1.905c.5170", + "mtu": "1500", + "description": "", + "enabled": True, + "802.1Q_mode": "tagged", + "lag": "", + "untagged_vlan": {"name": "vlan60", "id": "60"}, + "tagged_vlans": [{"name": "vlan40", "id": "40"}, {"name": "vlan50", "id": "50"}], + }, + "GigabitEthernet2": { + "mgmt_only": False, + "status": "Active", + "type": "100base-tx", + "ip_addresses": [ + {"host": "10.1.1.9", "mask_length": 24}, + ], + "mac_address": "d8b1.905c.6130", + "mtu": "1500", + "description": "uplink Po1", + "enabled": True, + "802.1Q_mode": "", + "lag": "Po1", + "untagged_vlan": "", + "tagged_vlans": [], + }, + }, + } + } debug = BooleanVar(description="Enable for more verbose logging.") namespace = ObjectVar( model=Namespace, required=True, description="The namespace for all IP addresses created or updated in the sync." @@ -588,17 +626,6 @@ class Meta: # pylint: disable=too-few-public-methods has_sensitive_variables = False hidden = False - def _process_result(self, command_result, ip_addresses): - """Process the data returned from devices.""" - processed_device_data = {} - for ip_address in ip_addresses: - processed_device_data[ip_address] = command_result[ip_address] - if self.debug: - self.logger.debug( # pylint: disable=logging-fstring-interpolation - f"Processed CommandGetterNetworkImporter return for {ip_address}: {command_result[ip_address]}" - ) - return processed_device_data - def run(self, *args, **kwargs): """Process onboarding task from ssot-ni job.""" try: @@ -614,27 +641,70 @@ def run(self, *args, **kwargs): }, }, ) as nornir_obj: - commands = ["show interfaces", "show vlan"] + commands = ["show version", "show interfaces", "show vlan", "show interfaces switchport"] all_results = {} formatted_data = {} for command in commands: command_result = nornir_obj.run(task=netmiko_send_command, command_string=command, use_textfsm=True) - + 
#all_results = format_ni_data_cisco_ios(command=command,command_result=command_result) for host_name, result in command_result.items(): if host_name not in all_results: - all_results[host_name] = {"interfaces": {}} - - if command == "show interfaces": + all_results[host_name] = {"interfaces": {}, "serial": ""} + + if command == "show version": + serial_info = result.result[0] + serial_number = serial_info.get("serial") + all_results[host_name]["serial"] = serial_number[0] + elif command == "show interfaces": + self.logger.info(f"Interfaces: {result.result}") for interface_info in result.result: + #interface_name = normalize_interface_name(interface_info.get("interface")) interface_name = interface_info.get("interface") + media_type = interface_info.get("media_type") + hardware_type = interface_info.get("hardware_type") mtu = interface_info.get("mtu") - # Store the interface name and MTU - all_results[host_name]["interfaces"][interface_name] = {"mtu": mtu} + description = interface_info.get("description") + mac_address = interface_info.get("mac_address") + link_status = interface_info.get("link_status") + ip_address = interface_info.get("ip_address") + mask_length = interface_info.get("prefix_length") + + if link_status == "up": + link_status = True + else: + link_status = False + # TODO: Map other types + type = "other" + if hardware_type == "EtherChannel": + type = "lag" + elif hardware_type == "Ethernet SVI": + type = "virtual" + elif media_type == "10/100/1000BaseTX": + type = "100base-tx" + else: + type = "other" + + all_results[host_name]["interfaces"][interface_name] = { + "mtu": mtu, + "type": type, + "media_type": media_type, + "hardware_type": hardware_type, + "description": description, + "mac_address": mac_address, + "enabled": link_status, + "ip_addresses": [{"host": ip_address, "mask_length": mask_length}] + } elif command == "show vlan": - # Example: Process "show vlan" command result differently - # Update `all_results` accordingly based on your 
needs - pass + self.logger.info(f"Vlan: {result.result}") + elif command == "show interfaces switchport": + for interface_info in result.result: + #interface_name = normalize_interface_name(interface_info.get("interface")) + self.logger.info(f"Interfaces switchport: {result.result}") + interface_mode = interface_info.get("admin_mode") + access_vlan = interface_info.get("access_vlan") + + except Exception as err: # pylint: disable=broad-exception-caught self.logger.info("Error: %s", err) diff --git a/nautobot_device_onboarding/utils/formatter.py b/nautobot_device_onboarding/utils/formatter.py index b1757ff2..205e5e72 100644 --- a/nautobot_device_onboarding/utils/formatter.py +++ b/nautobot_device_onboarding/utils/formatter.py @@ -56,6 +56,7 @@ def format_ob_data_nxos(host, result): formatted_data["mask_length"] = mask_length formatted_data["mgmt_interface"] = interface_name break + return formatted_data def format_ob_data_junos(host, result): @@ -89,3 +90,71 @@ def format_ob_data_junos(host, result): break return formatted_data + +def normalize_interface_name(interface_name): + if interface_name.startswith("Gi"): + return "GigabitEthernet" + interface_name[2:] + elif interface_name.startswith("Fa"): + return "FastEthernet" + interface_name[2:] + elif interface_name.startswith("Te"): + return "TenGigabitEthernet" + interface_name[2:] + elif interface_name.startswith("Fo"): + return "FortyGigabitEthernet" + interface_name[2:] + elif interface_name.startswith("Ap"): + return "AppGigabitEthernet" + interface_name[2:] + return interface_name + +def format_ni_data_cisco_ios(command,command_result): + all_results = {} + #command = ["show version", "show interfaces", "show vlan", "show interfaces switchport"] + for host_name, result in command_result.items(): + if host_name not in all_results: + all_results[host_name] = {"interfaces": {}, "serial": ""} + + if command == "show version": + serial_info = result.result[0] + serial_number = serial_info.get("serial") + 
all_results[host_name]["serial"] = serial_number[0] + elif command == "show interfaces": + print(f"Interfaces: {result.result}") + for interface_info in result.result: + interface_name = interface_info.get("interface") + media_type = interface_info.get("media_type") + hardware_type = interface_info.get("hardware_type") + mtu = interface_info.get("mtu") + description = interface_info.get("description") + mac_address = interface_info.get("mac_address") + link_status = interface_info.get("link_status") + + if link_status == "up": + link_status = True + else: + link_status = False + + type = "other" + if hardware_type == "EtherChannel": + type = "lag" + elif hardware_type == "Ethernet SVI": + type = "virtual" + elif media_type == "10/100/1000BaseTX": + type = "100base-tx" + else: + type = "other" + + all_results[host_name]["interfaces"][interface_name] = { + "mtu": mtu, + "type": type, + "media_type": media_type, + "hardware_type": hardware_type, + "description": description, + "mac_address": mac_address, + "enabled": link_status, + } + elif command == "show vlan": + print(f"Vlan: {result.result}") + elif command == "show interfaces switchport": + for interface_info in result.result: + print(f"Interfaces switchport: {result.result}") + interface_mode = interface_info.get("admin_mode") + access_vlan = interface_info.get("access_vlan") + return all_results \ No newline at end of file From cea10d4e2e53029fd7ba770ce67be0a99da59cbf Mon Sep 17 00:00:00 2001 From: Susan Hooks Date: Tue, 13 Feb 2024 00:04:05 +0000 Subject: [PATCH 050/225] updated formatting, added trunked vlans --- nautobot_device_onboarding/constants.py | 34 +++++++++++++ nautobot_device_onboarding/jobs.py | 48 ++++++++++++------- nautobot_device_onboarding/utils/formatter.py | 25 ++++++---- 3 files changed, 79 insertions(+), 28 deletions(-) diff --git a/nautobot_device_onboarding/constants.py b/nautobot_device_onboarding/constants.py index f9c9f91f..7d495315 100644 --- 
a/nautobot_device_onboarding/constants.py +++ b/nautobot_device_onboarding/constants.py @@ -15,3 +15,37 @@ "cisco_xe": ["show version", "show inventory", "show interfaces"], "juniper_junos": ["show version", "show interfaces", "show chassis hardware"], } + +CISCO_INTERFACE_ABBREVIATIONS = { + "Fa": "FastEthernet", + "Gi": "GigabitEthernet", + "Te": "TenGigabitEthernet", + "Twe": "TwentyFiveGigE", + "Fo": "FortyGigabitEthernet", + "Ap": "AppGigabitEthernet", + "Lo": "Loopback", + "Po": "Port-channel", + "BE": "Bundle-Ether", + "Vl": "Vlan", + "Tu": "Tunnel", +} + +CISCO_TO_NAUTOBOT_INTERFACE_TYPE = { + "Fast Ethernet": "100base-tx", + "EtherChannel": "lag", + "Gigabit Ethernet" : "1000base-tx", + "Ten Gigabit Ethernet": "10gbase-t", + "Twenty Five Gigabit Ethernet": "25gbase-t", + "Forty Gigabit Ethernet": "40gbase-t", + "AppGigabitEthernet": "40gbase-t", + "Port-channel": "lag", + "Ethernet SVI": "virtual", + +} + +TAGGED_INTERFACE_TYPES = { + "static access": "access", + "dynamic auto": "trunk-all", + "trunk": "trunk", + +} \ No newline at end of file diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py index 2c3f7c51..0b994163 100644 --- a/nautobot_device_onboarding/jobs.py +++ b/nautobot_device_onboarding/jobs.py @@ -38,7 +38,7 @@ from nautobot_device_onboarding.nornir_plays.processor import ProcessorDO from nautobot_device_onboarding.utils.helper import get_job_filter from nautobot_device_onboarding.utils.inventory_creator import _set_inventory -from nautobot_device_onboarding.utils.formatter import normalize_interface_name +from nautobot_device_onboarding.utils.formatter import normalize_interface_name, normalize_interface_type, normalize_tagged_interface InventoryPluginRegister.register("nautobot-inventory", NautobotORMInventory) InventoryPluginRegister.register("empty-inventory", EmptyInventory) @@ -659,7 +659,6 @@ def run(self, *args, **kwargs): elif command == "show interfaces": self.logger.info(f"Interfaces: 
{result.result}") for interface_info in result.result: - #interface_name = normalize_interface_name(interface_info.get("interface")) interface_name = interface_info.get("interface") media_type = interface_info.get("media_type") hardware_type = interface_info.get("hardware_type") @@ -674,37 +673,50 @@ def run(self, *args, **kwargs): link_status = True else: link_status = False - # TODO: Map other types - type = "other" - if hardware_type == "EtherChannel": - type = "lag" - elif hardware_type == "Ethernet SVI": - type = "virtual" - elif media_type == "10/100/1000BaseTX": - type = "100base-tx" - else: - type = "other" + + type = normalize_interface_type(hardware_type) all_results[host_name]["interfaces"][interface_name] = { "mtu": mtu, "type": type, - "media_type": media_type, - "hardware_type": hardware_type, "description": description, "mac_address": mac_address, "enabled": link_status, "ip_addresses": [{"host": ip_address, "mask_length": mask_length}] } elif command == "show vlan": + vlan_id_name_map = {} self.logger.info(f"Vlan: {result.result}") + for vlan_info in result.result: + vlan_id = vlan_info.get("vlan_id") + vlan_name = vlan_info.get("vlan_name") + vlan_id_name_map[vlan_id] = vlan_name + self.logger.info(f"Vlan ID Name Map: {vlan_id_name_map}") + elif command == "show interfaces switchport": + self.logger.info(f"Interfaces Switchport: {result.result}") for interface_info in result.result: - #interface_name = normalize_interface_name(interface_info.get("interface")) - self.logger.info(f"Interfaces switchport: {result.result}") - interface_mode = interface_info.get("admin_mode") + interface_name = normalize_interface_name(interface_info.get("interface")) + self.logger.info(f"Interface Name: {interface_name}") + interface_mode = normalize_tagged_interface(interface_info.get("admin_mode")) access_vlan = interface_info.get("access_vlan") + tagged_vlans = interface_info.get("trunking_vlans", []) + tagged_vlans_list = tagged_vlans[0].split(',') + 
self.logger.info(f"tagged_vlans: {tagged_vlans}") + + - + if interface_name in all_results[host_name]["interfaces"]: + all_results[host_name]["interfaces"][interface_name]["mode"] = interface_mode + all_results[host_name]["interfaces"][interface_name]["access_vlan"] = {"vlan_id": access_vlan, "vlan_name": vlan_id_name_map.get(access_vlan, "")} + + # Prepare tagged VLANs info + tagged_vlans_info = [{"vlan_id": vlan_id, "vlan_name": vlan_id_name_map.get(vlan_id, "Unknown VLAN")} for vlan_id in tagged_vlans_list if vlan_id in vlan_id_name_map] + self.logger.info(f"tagged_vlans_info: {tagged_vlans_info}") + all_results[host_name]["interfaces"][interface_name]["tagged_vlans"] = tagged_vlans_info + else: + self.logger.info(f"Interface {interface_name} not found in interfaces list.") + except Exception as err: # pylint: disable=broad-exception-caught self.logger.info("Error: %s", err) diff --git a/nautobot_device_onboarding/utils/formatter.py b/nautobot_device_onboarding/utils/formatter.py index 205e5e72..fbf166b2 100644 --- a/nautobot_device_onboarding/utils/formatter.py +++ b/nautobot_device_onboarding/utils/formatter.py @@ -1,5 +1,6 @@ """Formatter.""" +from nautobot_device_onboarding.constants import CISCO_INTERFACE_ABBREVIATIONS, CISCO_TO_NAUTOBOT_INTERFACE_TYPE, TAGGED_INTERFACE_TYPES def format_ob_data_ios(host, result): """Format the data for onboarding IOS devices.""" @@ -92,18 +93,22 @@ def format_ob_data_junos(host, result): return formatted_data def normalize_interface_name(interface_name): - if interface_name.startswith("Gi"): - return "GigabitEthernet" + interface_name[2:] - elif interface_name.startswith("Fa"): - return "FastEthernet" + interface_name[2:] - elif interface_name.startswith("Te"): - return "TenGigabitEthernet" + interface_name[2:] - elif interface_name.startswith("Fo"): - return "FortyGigabitEthernet" + interface_name[2:] - elif interface_name.startswith("Ap"): - return "AppGigabitEthernet" + interface_name[2:] + for interface_abbreviation, 
interface_full in CISCO_INTERFACE_ABBREVIATIONS.items(): + if interface_name.startswith(interface_abbreviation): + interface_name = interface_name.replace(interface_abbreviation, interface_full, 1) + break return interface_name +def normalize_interface_type(interface_type): + if interface_type in CISCO_TO_NAUTOBOT_INTERFACE_TYPE: + return CISCO_TO_NAUTOBOT_INTERFACE_TYPE[interface_type] + return "other" + +def normalize_tagged_interface(tagged_interface): + if tagged_interface in TAGGED_INTERFACE_TYPES: + return TAGGED_INTERFACE_TYPES[tagged_interface] + return "" + def format_ni_data_cisco_ios(command,command_result): all_results = {} #command = ["show version", "show interfaces", "show vlan", "show interfaces switchport"] From fae04e39f29c17974122b9c9dc2fa436b2736609 Mon Sep 17 00:00:00 2001 From: David Cates Date: Wed, 14 Feb 2024 09:12:40 -0700 Subject: [PATCH 051/225] add call to command getter --- .../adapters/network_importer_adapters.py | 142 +++---------- .../diffsync/adapters/onboarding_adapters.py | 28 --- .../diffsync/mock_data.py | 194 ++++++++++++++++++ 3 files changed, 224 insertions(+), 140 deletions(-) create mode 100644 nautobot_device_onboarding/diffsync/mock_data.py diff --git a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py index e7f32f65..2a70e31a 100644 --- a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py @@ -8,112 +8,13 @@ from netaddr import EUI, mac_unix_expanded from nautobot_device_onboarding.diffsync.models import network_importer_models +from nautobot_device_onboarding.diffsync import mock_data -####################################### -# FOR TESTING ONLY - TO BE REMOVED # -####################################### -mock_data = { - "demo-cisco-xe1": { - "serial": "9ABUXU581111", - "interfaces": { - "GigabitEthernet1": { - 
"mgmt_only": True, - "status": "Active", - "type": "100base-tx", - "ip_addresses": [ - {"host": "10.1.1.8", "mask_length": 32}, - ], - "mac_address": "d8b1.905c.5170", - "mtu": "1500", - "description": "", - "enabled": True, - "802.1Q_mode": "tagged", - "lag": "", - "untagged_vlan": {"name": "vlan60", "id": "60"}, - "tagged_vlans": [{"name": "vlan40", "id": "40"}], - }, - "GigabitEthernet2": { - "mgmt_only": False, - "status": "Active", - "type": "100base-tx", - "ip_addresses": [ - {"host": "10.1.1.9", "mask_length": 24}, - ], - "mac_address": "d8b1.905c.6130", - "mtu": "1500", - "description": "uplink Po1", - "enabled": True, - "802.1Q_mode": "", - "lag": "Po2", - "untagged_vlan": "", - "tagged_vlans": [], - }, - "GigabitEthernet3": { - "mgmt_only": False, - "status": "Active", - "type": "100base-tx", - "ip_addresses": [ - {"host": "10.1.1.10", "mask_length": 24}, - {"host": "10.1.1.11", "mask_length": 22}, - ], - "mac_address": "d8b1.905c.6130", - "mtu": "1500", - "description": "", - "enabled": True, - "802.1Q_mode": "tagged", - "lag": "Po1", - "untagged_vlan": "", - "tagged_vlans": [{"name": "vlan40", "id": "40"}, {"name": "vlan50", "id": "50"}], - }, - "GigabitEthernet4": { - "mgmt_only": False, - "status": "Active", - "type": "100base-tx", - "ip_addresses": [ - {"host": "10.1.1.12", "mask_length": 20}, - ], - "mac_address": "d8b1.905c.7130", - "mtu": "1500", - "description": "", - "enabled": True, - "802.1Q_mode": "", - "lag": "", - "untagged_vlan": "", - "tagged_vlans": [], - }, - "Po1": { - "mgmt_only": False, - "status": "Active", - "type": "lag", - "ip_addresses": [], - "mac_address": "d8b1.905c.8131", - "mtu": "1500", - "description": "", - "enabled": True, - "802.1Q_mode": "", - "lag": "", - "untagged_vlan": "", - "tagged_vlans": [], - }, - "Po2": { - "mgmt_only": False, - "status": "Active", - "type": "lag", - "ip_addresses": [], - "mac_address": "d8b1.905c.8132", - "mtu": "1500", - "description": "", - "enabled": True, - "802.1Q_mode": "", - "lag": 
"", - "untagged_vlan": "", - "tagged_vlans": [], - }, - }, - }, -} -####################################### -###################################### +import time + +import diffsync +from nautobot.apps.choices import JobResultStatusChoices +from nautobot.extras.models import Job, JobResult class FilteredNautobotAdapter(NautobotAdapter): @@ -171,7 +72,7 @@ def load_ip_addresses(self): if self.job.debug: self.job.logger.debug(f"{network_ip_address} loaded.") except diffsync.exceptions.ObjectAlreadyExists: - self.job.warning( + self.job.logger.warning( f"{network_ip_address} is already loaded to the " "DiffSync store. This is a duplicate IP Address." ) @@ -188,10 +89,7 @@ def load_vlans(self): network_vlan.model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST self.add(network_vlan) except diffsync.exceptions.ObjectAlreadyExists: - self.job.warning( - f"VLAN {vlan} is already loaded to the DiffSync store. " - "Vlans must have a unique combinaation of id, name and location." - ) + pass def load_tagged_vlans_to_interface(self): """Load a model representing tagged vlan assignments to the Diffsync store.""" @@ -275,7 +173,26 @@ def __init__(self, *args, job, sync=None, **kwargs): "lag_to_interface", ] - device_data = mock_data + # TODO: call command getter job instead of using mock data + device_data = mock_data.network_importer_mock_data + + def execute_command_getter(self): + """Start the CommandGetterDO job to query devices for data.""" + command_getter_job = Job.objects.get(name="Command Getter for Network Importer") + job_kwargs = self.job.prepare_job_kwargs(self.job.job_result.task_kwargs) + kwargs = self.job.serialize_data(job_kwargs) + result = JobResult.enqueue_job( + job_model=command_getter_job, user=self.job.user, celery_kwargs=self.job.job_result.celery_kwargs, **kwargs + ) + while True: + if result.status not in JobResultStatusChoices.READY_STATES: + time.sleep(5) + result.refresh_from_db() + else: + break + if self.job.debug: + self.job.logger.debug(f"Command 
Getter Job Result: {result.result}") + self._handle_failed_connections(device_data=result.result) def _process_mac_address(self, mac_address): """Convert a mac address to match the value stored by Nautobot.""" @@ -334,7 +251,7 @@ def load_ip_addresses(self): if self.job.debug: self.job.logger.debug(f"{network_ip_address} loaded.") except diffsync.exceptions.ObjectAlreadyExists: - self.job.warning( + self.job.logger.warning( f"{network_ip_address} is already loaded to the " "DiffSync store. This is a duplicate IP Address." ) @@ -421,6 +338,7 @@ def load(self): #TODO: Function for comparing incoming hostnames to nautobot hostnames loaded for sync. # remove missing hostnames from nautobot side of the sync (self.job.filtered_devices). + self.execute_command_getter() self.load_ip_addresses() if self.job.sync_vlans: self.load_vlans() diff --git a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py index 09b15028..8ac36776 100644 --- a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py @@ -10,34 +10,6 @@ from nautobot_device_onboarding.diffsync.models import onboarding_models -####################################### -# FOR TESTING ONLY - TO BE REMOVED # -####################################### -mock_data = { - "10.1.1.11": { - "hostname": "demo-cisco-xe1", - "serial": "9ABUXU581111", - "device_type": "CSR1000V17", - "mgmt_interface": "GigabitEthernet20", - "manufacturer": "Cisco", - "platform": "IOS-test", - "network_driver": "cisco_ios", - "mask_length": 16, - }, - "10.1.1.10": { - "hostname": "demo-cisco-xe2", - "serial": "9ABUXU5882222", - "device_type": "CSR1000V2", - "mgmt_interface": "GigabitEthernet5", - "manufacturer": "Cisco", - "platform": "IOS", - "network_driver": "cisco_ios", - "mask_length": 24, - }, -} -####################################### -####################################### 
- class OnboardingNautobotAdapter(diffsync.DiffSync): """Adapter for loading Nautobot data.""" diff --git a/nautobot_device_onboarding/diffsync/mock_data.py b/nautobot_device_onboarding/diffsync/mock_data.py new file mode 100644 index 00000000..2607cd9a --- /dev/null +++ b/nautobot_device_onboarding/diffsync/mock_data.py @@ -0,0 +1,194 @@ + +#TODO: move this data to testing folder for use in tests + +network_importer_mock_data = { + "demo-cisco-xe1": { + "serial": "9ABUXU581111", + "interfaces": { + "GigabitEthernet1": { + "mgmt_only": True, + "status": "Active", + "type": "100base-tx", + "ip_addresses": [ + {"host": "10.1.1.8", "mask_length": 32}, + ], + "mac_address": "d8b1.905c.7130", + "mtu": "1500", + "description": "", + "enabled": True, + "802.1Q_mode": "tagged", + "lag": "", + "untagged_vlan": {"name": "vlan60", "id": "60"}, + "tagged_vlans": [{"name": "vlan40", "id": "40"}], + }, + "GigabitEthernet2": { + "mgmt_only": False, + "status": "Active", + "type": "100base-tx", + "ip_addresses": [ + {"host": "10.1.1.9", "mask_length": 24}, + ], + "mac_address": "d8b1.905c.7131", + "mtu": "1500", + "description": "uplink Po1", + "enabled": True, + "802.1Q_mode": "", + "lag": "Po2", + "untagged_vlan": "", + "tagged_vlans": [], + }, + "GigabitEthernet3": { + "mgmt_only": False, + "status": "Active", + "type": "100base-tx", + "ip_addresses": [ + {"host": "10.1.1.10", "mask_length": 24}, + {"host": "10.1.1.11", "mask_length": 22}, + ], + "mac_address": "d8b1.905c.7132", + "mtu": "1500", + "description": "", + "enabled": True, + "802.1Q_mode": "tagged", + "lag": "Po1", + "untagged_vlan": "", + "tagged_vlans": [{"name": "vlan40", "id": "40"}, {"name": "vlan50", "id": "50"}], + }, + "GigabitEthernet4": { + "mgmt_only": False, + "status": "Active", + "type": "100base-tx", + "ip_addresses": [ + {"host": "10.1.1.12", "mask_length": 20}, + ], + "mac_address": "d8b1.905c.7133", + "mtu": "1500", + "description": "", + "enabled": True, + "802.1Q_mode": "", + "lag": "", + 
"untagged_vlan": "", + "tagged_vlans": [], + }, + "Po1": { + "mgmt_only": False, + "status": "Active", + "type": "lag", + "ip_addresses": [], + "mac_address": "d8b1.905c.7134", + "mtu": "1500", + "description": "", + "enabled": True, + "802.1Q_mode": "", + "lag": "", + "untagged_vlan": "", + "tagged_vlans": [], + }, + "Po2": { + "mgmt_only": False, + "status": "Active", + "type": "lag", + "ip_addresses": [], + "mac_address": "d8b1.905c.7135", + "mtu": "1500", + "description": "", + "enabled": True, + "802.1Q_mode": "", + "lag": "", + "untagged_vlan": "", + "tagged_vlans": [], + }, + }, + }, + "demo-cisco-xe2": { + "serial": "9ABUXU581234", + "interfaces": { + "GigabitEthernet1": { + "mgmt_only": True, + "status": "Active", + "type": "100base-tx", + "ip_addresses": [ + {"host": "10.1.1.8", "mask_length": 32}, + ], + "mac_address": "d8b1.905c.5170", + "mtu": "1500", + "description": "", + "enabled": True, + "802.1Q_mode": "tagged", + "lag": "", + "untagged_vlan": {"name": "vlan60", "id": "60"}, + "tagged_vlans": [{"name": "vlan40", "id": "40"}], + }, + "GigabitEthernet2": { + "mgmt_only": False, + "status": "Active", + "type": "100base-tx", + "ip_addresses": [ + {"host": "10.1.1.9", "mask_length": 24}, + ], + "mac_address": "d8b1.905c.5171", + "mtu": "1500", + "description": "uplink Po1", + "enabled": True, + "802.1Q_mode": "", + "lag": "Po2", + "untagged_vlan": "", + "tagged_vlans": [], + }, + "GigabitEthernet3": { + "mgmt_only": False, + "status": "Active", + "type": "100base-tx", + "ip_addresses": [ + {"host": "10.1.1.10", "mask_length": 24}, + {"host": "10.1.1.11", "mask_length": 22}, + ], + "mac_address": "d8b1.905c.5172", + "mtu": "1500", + "description": "", + "enabled": True, + "802.1Q_mode": "tagged", + "lag": "Po1", + "untagged_vlan": "", + "tagged_vlans": [{"name": "vlan40", "id": "40"}, {"name": "vlan50", "id": "50"}], + }, + "Po2": { + "mgmt_only": False, + "status": "Active", + "type": "lag", + "ip_addresses": [], + "mac_address": "d8b1.905c.5173", + 
"mtu": "1500", + "description": "", + "enabled": True, + "802.1Q_mode": "", + "lag": "", + "untagged_vlan": "", + "tagged_vlans": [], + }, + }, + }, +} + +device_onboarding_mock_data = { + "10.1.1.11": { + "hostname": "demo-cisco-xe1", + "serial": "9ABUXU581111", + "device_type": "CSR1000V17", + "mgmt_interface": "GigabitEthernet20", + "manufacturer": "Cisco", + "platform": "IOS-test", + "network_driver": "cisco_ios", + "mask_length": 16, + }, + "10.1.1.10": { + "hostname": "demo-cisco-xe2", + "serial": "9ABUXU5882222", + "device_type": "CSR1000V2", + "mgmt_interface": "GigabitEthernet5", + "manufacturer": "Cisco", + "platform": "IOS", + "network_driver": "cisco_ios", + "mask_length": 24, + }, +} \ No newline at end of file From 0a901150eb758f3ae4b0b6904ba4e4708cd181ca Mon Sep 17 00:00:00 2001 From: David Cates Date: Wed, 14 Feb 2024 09:18:17 -0700 Subject: [PATCH 052/225] update command getter call --- .../adapters/network_importer_adapters.py | 26 ++++++++++++++++--- 1 file changed, 22 insertions(+), 4 deletions(-) diff --git a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py index 2a70e31a..0d5e0f7c 100644 --- a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py @@ -154,7 +154,7 @@ def __init__(self, *args, job, sync=None, **kwargs): self.job = job self.sync = sync - device_data = mock_data + device_data = None device = network_importer_models.NetworkImporterDevice interface = network_importer_models.NetworkImporterInterface @@ -173,8 +173,24 @@ def __init__(self, *args, job, sync=None, **kwargs): "lag_to_interface", ] - # TODO: call command getter job instead of using mock data - device_data = mock_data.network_importer_mock_data + # def _handle_failed_connections(self, device_data): + # """ + # Handle result data from failed device connections. 
+ + # If a device fails to return expected data, log the result + # and remove it from the data to be loaded into the diffsync store. + # """ + # failed_ip_addresses = [] + + # for ip_address in device_data: + # if device_data[ip_address].get("failed"): + # self.job.logger.error(f"Connection or data error for {ip_address}. This device will not be onboarded.") + # if self.job.debug: + # self.job.logger.error(device_data[ip_address].get("subtask_result")) + # failed_ip_addresses.append(ip_address) + # for ip_address in failed_ip_addresses: + # del device_data[ip_address] + # self.device_data = device_data def execute_command_getter(self): """Start the CommandGetterDO job to query devices for data.""" @@ -192,7 +208,9 @@ def execute_command_getter(self): break if self.job.debug: self.job.logger.debug(f"Command Getter Job Result: {result.result}") - self._handle_failed_connections(device_data=result.result) + # TODO: Handle failed connections + # self._handle_failed_connections(device_data=result.result) + self.device_data = result.result def _process_mac_address(self, mac_address): """Convert a mac address to match the value stored by Nautobot.""" From 50167da7d9735690e05f89d37cec9ce6066da745 Mon Sep 17 00:00:00 2001 From: Susan Hooks Date: Wed, 14 Feb 2024 17:50:29 +0000 Subject: [PATCH 053/225] handle failed result --- nautobot_device_onboarding/jobs.py | 50 ++++++------------------------ 1 file changed, 9 insertions(+), 41 deletions(-) diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py index 0b994163..00aa8aee 100644 --- a/nautobot_device_onboarding/jobs.py +++ b/nautobot_device_onboarding/jobs.py @@ -550,44 +550,6 @@ def run(self, *args, **kwargs): class CommandGetterNetworkImporter(Job): """Simple Job to Execute Show Command.""" - - mock_job_data = { - "demo-cisco-xe1": { - "serial": "9ABUXU581111", - "interfaces": { - "GigabitEthernet1": { - "type": "100base-tx", - "ip_addresses": [ - {"host": "10.1.1.8", "mask_length": 32}, - 
], - "mac_address": "d8b1.905c.5170", - "mtu": "1500", - "description": "", - "enabled": True, - "802.1Q_mode": "tagged", - "lag": "", - "untagged_vlan": {"name": "vlan60", "id": "60"}, - "tagged_vlans": [{"name": "vlan40", "id": "40"}, {"name": "vlan50", "id": "50"}], - }, - "GigabitEthernet2": { - "mgmt_only": False, - "status": "Active", - "type": "100base-tx", - "ip_addresses": [ - {"host": "10.1.1.9", "mask_length": 24}, - ], - "mac_address": "d8b1.905c.6130", - "mtu": "1500", - "description": "uplink Po1", - "enabled": True, - "802.1Q_mode": "", - "lag": "Po1", - "untagged_vlan": "", - "tagged_vlans": [], - }, - }, - } - } debug = BooleanVar(description="Enable for more verbose logging.") namespace = ObjectVar( model=Namespace, required=True, description="The namespace for all IP addresses created or updated in the sync." @@ -643,22 +605,27 @@ def run(self, *args, **kwargs): ) as nornir_obj: commands = ["show version", "show interfaces", "show vlan", "show interfaces switchport"] all_results = {} - formatted_data = {} + for command in commands: command_result = nornir_obj.run(task=netmiko_send_command, command_string=command, use_textfsm=True) - #all_results = format_ni_data_cisco_ios(command=command,command_result=command_result) for host_name, result in command_result.items(): + if command_result.failed: + failed_results = {host_name : { "Failed": True, "subtask_result": result.result }} + return failed_results if host_name not in all_results: all_results[host_name] = {"interfaces": {}, "serial": ""} if command == "show version": + self.logger.info(f"Show version: {result.result}") serial_info = result.result[0] + self.logger.info(f"Serial Info: {serial_info}") serial_number = serial_info.get("serial") all_results[host_name]["serial"] = serial_number[0] elif command == "show interfaces": self.logger.info(f"Interfaces: {result.result}") for interface_info in result.result: + self.logger.info(f"Interface Info: {interface_info}") interface_name = 
interface_info.get("interface") media_type = interface_info.get("media_type") hardware_type = interface_info.get("hardware_type") @@ -688,6 +655,7 @@ def run(self, *args, **kwargs): vlan_id_name_map = {} self.logger.info(f"Vlan: {result.result}") for vlan_info in result.result: + self.logger.info(f"Vlan info: {vlan_info}") vlan_id = vlan_info.get("vlan_id") vlan_name = vlan_info.get("vlan_name") vlan_id_name_map[vlan_id] = vlan_name @@ -696,6 +664,7 @@ def run(self, *args, **kwargs): elif command == "show interfaces switchport": self.logger.info(f"Interfaces Switchport: {result.result}") for interface_info in result.result: + self.logger.info(f"Interface Info: {interface_info}") interface_name = normalize_interface_name(interface_info.get("interface")) self.logger.info(f"Interface Name: {interface_name}") interface_mode = normalize_tagged_interface(interface_info.get("admin_mode")) @@ -710,7 +679,6 @@ def run(self, *args, **kwargs): all_results[host_name]["interfaces"][interface_name]["mode"] = interface_mode all_results[host_name]["interfaces"][interface_name]["access_vlan"] = {"vlan_id": access_vlan, "vlan_name": vlan_id_name_map.get(access_vlan, "")} - # Prepare tagged VLANs info tagged_vlans_info = [{"vlan_id": vlan_id, "vlan_name": vlan_id_name_map.get(vlan_id, "Unknown VLAN")} for vlan_id in tagged_vlans_list if vlan_id in vlan_id_name_map] self.logger.info(f"tagged_vlans_info: {tagged_vlans_info}") all_results[host_name]["interfaces"][interface_name]["tagged_vlans"] = tagged_vlans_info From 6d225e7b6f297e025aba27c70e2bbac3a3affa76 Mon Sep 17 00:00:00 2001 From: David Cates Date: Wed, 14 Feb 2024 16:12:05 -0700 Subject: [PATCH 054/225] update formatting --- nautobot_device_onboarding/constants.py | 6 +-- .../adapters/network_importer_adapters.py | 25 ++++++------ .../diffsync/mock_data.py | 5 +-- .../models/network_importer_models.py | 27 +++++++++---- nautobot_device_onboarding/jobs.py | 40 ++++++++++++------- nautobot_device_onboarding/utils/formatter.py | 
23 +++++++---- 6 files changed, 78 insertions(+), 48 deletions(-) diff --git a/nautobot_device_onboarding/constants.py b/nautobot_device_onboarding/constants.py index 7d495315..c9889ee9 100644 --- a/nautobot_device_onboarding/constants.py +++ b/nautobot_device_onboarding/constants.py @@ -33,19 +33,17 @@ CISCO_TO_NAUTOBOT_INTERFACE_TYPE = { "Fast Ethernet": "100base-tx", "EtherChannel": "lag", - "Gigabit Ethernet" : "1000base-tx", + "Gigabit Ethernet": "1000base-tx", "Ten Gigabit Ethernet": "10gbase-t", "Twenty Five Gigabit Ethernet": "25gbase-t", "Forty Gigabit Ethernet": "40gbase-t", "AppGigabitEthernet": "40gbase-t", "Port-channel": "lag", "Ethernet SVI": "virtual", - } TAGGED_INTERFACE_TYPES = { "static access": "access", "dynamic auto": "trunk-all", "trunk": "trunk", - -} \ No newline at end of file +} diff --git a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py index 0d5e0f7c..20126143 100644 --- a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py @@ -1,20 +1,17 @@ """DiffSync adapters.""" +import time + import diffsync from diffsync.enum import DiffSyncModelFlags +from nautobot.apps.choices import JobResultStatusChoices from nautobot.dcim.models import Interface +from nautobot.extras.models import Job, JobResult from nautobot.ipam.models import VLAN, IPAddress from nautobot_ssot.contrib import NautobotAdapter from netaddr import EUI, mac_unix_expanded from nautobot_device_onboarding.diffsync.models import network_importer_models -from nautobot_device_onboarding.diffsync import mock_data - -import time - -import diffsync -from nautobot.apps.choices import JobResultStatusChoices -from nautobot.extras.models import Job, JobResult class FilteredNautobotAdapter(NautobotAdapter): @@ -52,7 +49,7 @@ class NetworkImporterNautobotAdapter(FilteredNautobotAdapter): ] 
def load_param_mac_address(self, parameter_name, database_object): - """Convert interface mac_address to string""" + """Convert interface mac_address to string.""" return str(database_object.mac_address) def load_ip_addresses(self): @@ -126,15 +123,15 @@ def load(self): raise ValueError("'top_level' needs to be set on the class.") for model_name in self.top_level: - if model_name is "ip_address": + if model_name == "ip_address": self.load_ip_addresses() - elif model_name is "vlan": + elif model_name == "vlan": if self.job.sync_vlans: self.load_vlans() - elif model_name is "tagged_vlans_to_interface": + elif model_name == "tagged_vlans_to_interface": if self.job.sync_vlans: self.load_tagged_vlans_to_interface() - elif model_name is "lag_to_interface": + elif model_name == "lag_to_interface": self.load_lag_to_interface() else: diffsync_model = self._get_diffsync_class(model_name) @@ -330,6 +327,7 @@ def load_ip_address_to_interfaces(self): self.job.logger.debug(f"{network_ip_address_to_interface} loaded.") def load_tagged_vlans_to_interface(self): + """Load tagged vlan to interface assignments into the Diffsync store.""" for hostname, device_data in self.device_data.items(): for interface_name, interface_data in device_data["interfaces"].items(): network_tagged_vlans_to_interface = self.tagged_vlans_to_interface( @@ -341,6 +339,7 @@ def load_tagged_vlans_to_interface(self): self.add(network_tagged_vlans_to_interface) def load_lag_to_interface(self): + """Load lag interface assignments into the Diffsync store.""" for hostname, device_data in self.device_data.items(): for interface_name, interface_data in device_data["interfaces"].items(): network_lag_to_interface = self.lag_to_interface( @@ -353,7 +352,7 @@ def load_lag_to_interface(self): def load(self): """Load network data.""" - #TODO: Function for comparing incoming hostnames to nautobot hostnames loaded for sync. + # TODO: Function for comparing incoming hostnames to nautobot hostnames loaded for sync. 
# remove missing hostnames from nautobot side of the sync (self.job.filtered_devices). self.execute_command_getter() diff --git a/nautobot_device_onboarding/diffsync/mock_data.py b/nautobot_device_onboarding/diffsync/mock_data.py index 2607cd9a..21f25aff 100644 --- a/nautobot_device_onboarding/diffsync/mock_data.py +++ b/nautobot_device_onboarding/diffsync/mock_data.py @@ -1,5 +1,4 @@ - -#TODO: move this data to testing folder for use in tests +# TODO: move this data to testing folder for use in tests network_importer_mock_data = { "demo-cisco-xe1": { @@ -191,4 +190,4 @@ "network_driver": "cisco_ios", "mask_length": 24, }, -} \ No newline at end of file +} diff --git a/nautobot_device_onboarding/diffsync/models/network_importer_models.py b/nautobot_device_onboarding/diffsync/models/network_importer_models.py index 5596ca4e..c48276e2 100644 --- a/nautobot_device_onboarding/diffsync/models/network_importer_models.py +++ b/nautobot_device_onboarding/diffsync/models/network_importer_models.py @@ -4,7 +4,7 @@ from typing import List, Optional from diffsync import DiffSync, DiffSyncModel -from django.core.exceptions import ObjectDoesNotExist, ValidationError +from django.core.exceptions import MultipleObjectsReturned, ObjectDoesNotExist, ValidationError from nautobot.dcim.choices import InterfaceTypeChoices from nautobot.dcim.models import Device, Interface, Location from nautobot.extras.models import Status @@ -197,13 +197,26 @@ class NetworkImporterVLAN(DiffSyncModel): @classmethod def create(cls, diffsync, ids, attrs): """Create a new VLAN""" + location = None + try: + location = Location.objects.get(name=ids["location__name"]) + except ObjectDoesNotExist: + diffsync.job.logger.warning( + f"While creating VLAN {ids['vid']} - {ids['name']}, " + f"unable to find a Location with name: {ids['location__name']}. 
" + "This VLAN will be created without a Location" + ) + except MultipleObjectsReturned: + diffsync.job.logger.warning( + f"While creating VLAN {ids['vid']} - {ids['name']}, " + f"Multiple Locations were found with name: {ids['location__name']}. " + "This VLAN will be created without a Location" + ) try: vlan = VLAN( name=ids["name"], vid=ids["vid"], - location=Location.objects.get( - name=ids["location__name"] - ), # TODO: This will fail if multiple locations are returned. + location=location, status=Status.objects.get(name="Active"), # TODO: this can't be hardcoded, add a form input ) vlan.validated_save() @@ -225,7 +238,7 @@ class NetworkImporterTaggedVlansToInterface(DiffSyncModel): tagged_vlans: Optional[list] - #TODO: move the create and update method locgic to a single utility function + # TODO: move the create and update method logic to a single utility function @classmethod def create(cls, diffsync, ids, attrs): """Assign tagged vlans to an interface.""" @@ -289,11 +302,11 @@ class NetworkImporterLagToInterface(DiffSyncModel): lag__interface__name: Optional[str] - #TODO: move the create and update method locgic to a single utility function + # TODO: move the create and update method locgic to a single utility function @classmethod def create(cls, diffsync, ids, attrs): """Assign tagged vlans to an interface.""" - if attrs["lag__interface__name"]: # Prevent the sync from attempting to assign lag interface names of 'None' + if attrs["lag__interface__name"]: # Prevent the sync from attempting to assign lag interface names of 'None' interface = Interface.objects.get(device__name=ids["device__name"], name=ids["name"]) try: lag_interface = Interface.objects.get( diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py index 00aa8aee..2c01b15f 100644 --- a/nautobot_device_onboarding/jobs.py +++ b/nautobot_device_onboarding/jobs.py @@ -36,9 +36,13 @@ from nautobot_device_onboarding.nornir_plays.empty_inventory import EmptyInventory from 
nautobot_device_onboarding.nornir_plays.logger import NornirLogger from nautobot_device_onboarding.nornir_plays.processor import ProcessorDO +from nautobot_device_onboarding.utils.formatter import ( + normalize_interface_name, + normalize_interface_type, + normalize_tagged_interface, +) from nautobot_device_onboarding.utils.helper import get_job_filter from nautobot_device_onboarding.utils.inventory_creator import _set_inventory -from nautobot_device_onboarding.utils.formatter import normalize_interface_name, normalize_interface_type, normalize_tagged_interface InventoryPluginRegister.register("nautobot-inventory", NautobotORMInventory) InventoryPluginRegister.register("empty-inventory", EmptyInventory) @@ -609,6 +613,7 @@ def run(self, *args, **kwargs): for command in commands: command_result = nornir_obj.run(task=netmiko_send_command, command_string=command, use_textfsm=True) + # all_results = format_ni_data_cisco_ios(command=command,command_result=command_result) for host_name, result in command_result.items(): if command_result.failed: failed_results = {host_name : { "Failed": True, "subtask_result": result.result }} @@ -635,21 +640,21 @@ def run(self, *args, **kwargs): link_status = interface_info.get("link_status") ip_address = interface_info.get("ip_address") mask_length = interface_info.get("prefix_length") - + if link_status == "up": link_status = True else: link_status = False - + type = normalize_interface_type(hardware_type) - + all_results[host_name]["interfaces"][interface_name] = { "mtu": mtu, "type": type, "description": description, "mac_address": mac_address, "enabled": link_status, - "ip_addresses": [{"host": ip_address, "mask_length": mask_length}] + "ip_addresses": [{"host": ip_address, "mask_length": mask_length}], } elif command == "show vlan": vlan_id_name_map = {} @@ -670,21 +675,28 @@ def run(self, *args, **kwargs): interface_mode = normalize_tagged_interface(interface_info.get("admin_mode")) access_vlan = interface_info.get("access_vlan") 
tagged_vlans = interface_info.get("trunking_vlans", []) - tagged_vlans_list = tagged_vlans[0].split(',') + tagged_vlans_list = tagged_vlans[0].split(",") self.logger.info(f"tagged_vlans: {tagged_vlans}") - - - + if interface_name in all_results[host_name]["interfaces"]: all_results[host_name]["interfaces"][interface_name]["mode"] = interface_mode - all_results[host_name]["interfaces"][interface_name]["access_vlan"] = {"vlan_id": access_vlan, "vlan_name": vlan_id_name_map.get(access_vlan, "")} - - tagged_vlans_info = [{"vlan_id": vlan_id, "vlan_name": vlan_id_name_map.get(vlan_id, "Unknown VLAN")} for vlan_id in tagged_vlans_list if vlan_id in vlan_id_name_map] + all_results[host_name]["interfaces"][interface_name]["access_vlan"] = { + "vlan_id": access_vlan, + "vlan_name": vlan_id_name_map.get(access_vlan, ""), + } + + # Prepare tagged VLANs info + tagged_vlans_info = [ + {"vlan_id": vlan_id, "vlan_name": vlan_id_name_map.get(vlan_id, "Unknown VLAN")} + for vlan_id in tagged_vlans_list + if vlan_id in vlan_id_name_map + ] self.logger.info(f"tagged_vlans_info: {tagged_vlans_info}") - all_results[host_name]["interfaces"][interface_name]["tagged_vlans"] = tagged_vlans_info + all_results[host_name]["interfaces"][interface_name][ + "tagged_vlans" + ] = tagged_vlans_info else: self.logger.info(f"Interface {interface_name} not found in interfaces list.") - except Exception as err: # pylint: disable=broad-exception-caught self.logger.info("Error: %s", err) diff --git a/nautobot_device_onboarding/utils/formatter.py b/nautobot_device_onboarding/utils/formatter.py index fbf166b2..98d529fe 100644 --- a/nautobot_device_onboarding/utils/formatter.py +++ b/nautobot_device_onboarding/utils/formatter.py @@ -1,6 +1,11 @@ """Formatter.""" -from nautobot_device_onboarding.constants import CISCO_INTERFACE_ABBREVIATIONS, CISCO_TO_NAUTOBOT_INTERFACE_TYPE, TAGGED_INTERFACE_TYPES +from nautobot_device_onboarding.constants import ( + CISCO_INTERFACE_ABBREVIATIONS, + 
CISCO_TO_NAUTOBOT_INTERFACE_TYPE, + TAGGED_INTERFACE_TYPES, +) + def format_ob_data_ios(host, result): """Format the data for onboarding IOS devices.""" @@ -92,6 +97,7 @@ def format_ob_data_junos(host, result): return formatted_data + def normalize_interface_name(interface_name): for interface_abbreviation, interface_full in CISCO_INTERFACE_ABBREVIATIONS.items(): if interface_name.startswith(interface_abbreviation): @@ -99,19 +105,22 @@ def normalize_interface_name(interface_name): break return interface_name + def normalize_interface_type(interface_type): if interface_type in CISCO_TO_NAUTOBOT_INTERFACE_TYPE: return CISCO_TO_NAUTOBOT_INTERFACE_TYPE[interface_type] return "other" + def normalize_tagged_interface(tagged_interface): if tagged_interface in TAGGED_INTERFACE_TYPES: return TAGGED_INTERFACE_TYPES[tagged_interface] return "" -def format_ni_data_cisco_ios(command,command_result): + +def format_ni_data_cisco_ios(command, command_result): all_results = {} - #command = ["show version", "show interfaces", "show vlan", "show interfaces switchport"] + # command = ["show version", "show interfaces", "show vlan", "show interfaces switchport"] for host_name, result in command_result.items(): if host_name not in all_results: all_results[host_name] = {"interfaces": {}, "serial": ""} @@ -130,12 +139,12 @@ def format_ni_data_cisco_ios(command,command_result): description = interface_info.get("description") mac_address = interface_info.get("mac_address") link_status = interface_info.get("link_status") - + if link_status == "up": link_status = True else: link_status = False - + type = "other" if hardware_type == "EtherChannel": type = "lag" @@ -145,7 +154,7 @@ def format_ni_data_cisco_ios(command,command_result): type = "100base-tx" else: type = "other" - + all_results[host_name]["interfaces"][interface_name] = { "mtu": mtu, "type": type, @@ -162,4 +171,4 @@ def format_ni_data_cisco_ios(command,command_result): print(f"Interfaces switchport: {result.result}") 
interface_mode = interface_info.get("admin_mode") access_vlan = interface_info.get("access_vlan") - return all_results \ No newline at end of file + return all_results From 5a7c25177381a5b5281efd731189873580d8e814 Mon Sep 17 00:00:00 2001 From: David Cates Date: Wed, 14 Feb 2024 17:33:57 -0700 Subject: [PATCH 055/225] formatting --- .../adapters/network_importer_adapters.py | 11 +- .../diffsync/mock_data.py | 2 + .../models/network_importer_models.py | 9 +- nautobot_device_onboarding/jobs.py | 27 +- .../onboarding/onboarding.py | 4 +- .../tests/test_onboarding.py | 4 +- nautobot_device_onboarding/utils/formatter.py | 4 + nautobot_device_onboarding/utils/helper.py | 2 + poetry.lock | 789 ++++++++++-------- pyproject.toml | 14 +- 10 files changed, 478 insertions(+), 388 deletions(-) diff --git a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py index 20126143..da88a18c 100644 --- a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py @@ -50,6 +50,8 @@ class NetworkImporterNautobotAdapter(FilteredNautobotAdapter): def load_param_mac_address(self, parameter_name, database_object): """Convert interface mac_address to string.""" + if self.job.debug: + self.job.logger.debug(f"Converting {parameter_name}: {database_object.mac_address}") return str(database_object.mac_address) def load_ip_addresses(self): @@ -138,7 +140,8 @@ def load(self): self._load_objects(diffsync_model) -class mac_unix_expanded_uppercase(mac_unix_expanded): +class MacUnixExpandedUppercase(mac_unix_expanded): + """Mac Unix Expanded Uppercase.""" word_fmt = "%.2X" @@ -211,7 +214,7 @@ def execute_command_getter(self): def _process_mac_address(self, mac_address): """Convert a mac address to match the value stored by Nautobot.""" - return str(EUI(mac_address, version=48, dialect=mac_unix_expanded_uppercase)) + 
return str(EUI(mac_address, version=48, dialect=MacUnixExpandedUppercase)) def load_devices(self): """Load devices into the DiffSync store.""" @@ -252,6 +255,8 @@ def load_ip_addresses(self): for hostname, device_data in self.device_data.items(): for interface_name, interface_data in device_data["interfaces"].items(): for ip_address in interface_data["ip_addresses"]: + if self.job.debug: + self.job.logger.debug(f"Loading {ip_address} from {interface_name} on {hostname}") network_ip_address = self.ip_address( diffsync=self, host=ip_address["host"], @@ -280,6 +285,8 @@ def load_vlans(self): for hostname, device_data in self.device_data.items(): for interface_name, interface_data in device_data["interfaces"].items(): # add tagged vlans + if self.job.debug: + self.job.logger.debug(f"Loading tagged vlans for {interface_name}") for tagged_vlan in interface_data["tagged_vlans"]: network_vlan = self.vlan( diffsync=self, diff --git a/nautobot_device_onboarding/diffsync/mock_data.py b/nautobot_device_onboarding/diffsync/mock_data.py index 21f25aff..1e9f7152 100644 --- a/nautobot_device_onboarding/diffsync/mock_data.py +++ b/nautobot_device_onboarding/diffsync/mock_data.py @@ -1,3 +1,5 @@ +"""Mock Data for use with Diffsync.""" + # TODO: move this data to testing folder for use in tests network_importer_mock_data = { diff --git a/nautobot_device_onboarding/diffsync/models/network_importer_models.py b/nautobot_device_onboarding/diffsync/models/network_importer_models.py index c48276e2..76c2f0ce 100644 --- a/nautobot_device_onboarding/diffsync/models/network_importer_models.py +++ b/nautobot_device_onboarding/diffsync/models/network_importer_models.py @@ -196,7 +196,7 @@ class NetworkImporterVLAN(DiffSyncModel): @classmethod def create(cls, diffsync, ids, attrs): - """Create a new VLAN""" + """Create a new VLAN.""" location = None try: location = Location.objects.get(name=ids["location__name"]) @@ -265,6 +265,7 @@ def create(cls, diffsync, ids, attrs): return 
super().create(diffsync, ids, attrs) def update(self, attrs): + """Update tagged vlans.""" interface = Interface.objects.get(**self.get_identifiers()) interface.tagged_vlans.clear() @@ -291,8 +292,7 @@ def update(self, attrs): class NetworkImporterLagToInterface(DiffSyncModel): - """Shared data model representing a LagToInterface""" - + """Shared data model representing a LagToInterface.""" _modelname = "lag_to_interface" _identifiers = ("device__name", "name") _attributes = ("lag__interface__name",) @@ -305,7 +305,7 @@ class NetworkImporterLagToInterface(DiffSyncModel): # TODO: move the create and update method locgic to a single utility function @classmethod def create(cls, diffsync, ids, attrs): - """Assign tagged vlans to an interface.""" + """Assign a lag to an interface.""" if attrs["lag__interface__name"]: # Prevent the sync from attempting to assign lag interface names of 'None' interface = Interface.objects.get(device__name=ids["device__name"], name=ids["name"]) try: @@ -327,6 +327,7 @@ def create(cls, diffsync, ids, attrs): return super().create(diffsync, ids, attrs) def update(self, attrs): + """Update and interface lag.""" interface = Interface.objects.get(**self.get_identifiers()) try: lag_interface = Interface.objects.get( diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py index 2c01b15f..c9107af0 100644 --- a/nautobot_device_onboarding/jobs.py +++ b/nautobot_device_onboarding/jobs.py @@ -16,11 +16,11 @@ from nautobot_ssot.jobs.base import DataSource from nornir import InitNornir from nornir.core.plugins.inventory import InventoryPluginRegister -from nornir.core.task import Result, Task -from nornir_nautobot.exceptions import NornirNautobotException +# from nornir.core.task import Result, Task +# from nornir_nautobot.exceptions import NornirNautobotException from nornir_netmiko import netmiko_send_command -from nautobot_device_onboarding.constants import PLATFORM_COMMAND_MAP +# from 
nautobot_device_onboarding.constants import PLATFORM_COMMAND_MAP from nautobot_device_onboarding.diffsync.adapters.network_importer_adapters import ( NetworkImporterNautobotAdapter, NetworkImporterNetworkAdapter, @@ -94,7 +94,7 @@ class OnboardingTask(Job): # pylint: disable=too-many-instance-attributes description="If an exception occurs, log the exception and continue to next device.", ) - class Meta: # pylint: disable=too-few-public-methods + class Meta: """Meta object boilerplate for onboarding.""" name = "Perform Device Onboarding" @@ -228,7 +228,7 @@ def __init__(self): super().__init__() self.diffsync_flags = DiffSyncFlags.SKIP_UNMATCHED_DST - class Meta: # pylint: disable=too-few-public-methods + class Meta: """Metadata about this Job.""" name = "Sync Devices" @@ -324,7 +324,7 @@ def run( platform, *args, **kwargs, - ): # pylint:disable=arguments-differ, too-many-arguments, too-many-locals + ): """Run sync.""" self.dryrun = dryrun self.memory_profiling = memory_profiling @@ -370,7 +370,7 @@ def __init__(self): super().__init__() self.filtered_devices = None - class Meta: # pylint: disable=too-few-public-methods + class Meta: """Metadata about this Job.""" name = "Sync Network Data" @@ -446,7 +446,7 @@ def run( tag, *args, **kwargs, - ): # pylint:disable=arguments-differ, disable=too-many-arguments + ): """Run sync.""" self.dryrun = dryrun self.memory_profiling = memory_profiling @@ -489,7 +489,7 @@ def run( class CommandGetterDO(Job): """Simple Job to Execute Show Command.""" - class Meta: # pylint: disable=too-few-public-methods + class Meta: """Meta object boilerplate for onboarding.""" name = "Command Getter for Device Onboarding" @@ -584,7 +584,7 @@ class CommandGetterNetworkImporter(Job): port = IntegerVar(default=22) timeout = IntegerVar(default=30) - class Meta: # pylint: disable=too-few-public-methods + class Meta: """Meta object boilerplate for onboarding.""" name = "Command Getter for Network Importer" @@ -609,7 +609,6 @@ def run(self, *args, 
**kwargs): ) as nornir_obj: commands = ["show version", "show interfaces", "show vlan", "show interfaces switchport"] all_results = {} - for command in commands: command_result = nornir_obj.run(task=netmiko_send_command, command_string=command, use_textfsm=True) @@ -632,7 +631,7 @@ def run(self, *args, **kwargs): for interface_info in result.result: self.logger.info(f"Interface Info: {interface_info}") interface_name = interface_info.get("interface") - media_type = interface_info.get("media_type") + # media_type = interface_info.get("media_type") hardware_type = interface_info.get("hardware_type") mtu = interface_info.get("mtu") description = interface_info.get("description") @@ -646,11 +645,11 @@ def run(self, *args, **kwargs): else: link_status = False - type = normalize_interface_type(hardware_type) + interface_type = normalize_interface_type(hardware_type) all_results[host_name]["interfaces"][interface_name] = { "mtu": mtu, - "type": type, + "type": interface_type, "description": description, "mac_address": mac_address, "enabled": link_status, diff --git a/nautobot_device_onboarding/onboarding/onboarding.py b/nautobot_device_onboarding/onboarding/onboarding.py index bd67d96b..1c06a233 100644 --- a/nautobot_device_onboarding/onboarding/onboarding.py +++ b/nautobot_device_onboarding/onboarding/onboarding.py @@ -3,7 +3,7 @@ from nautobot_device_onboarding.nautobot_keeper import NautobotKeeper -class Onboarding: # pylint: disable=too-few-public-methods +class Onboarding: """Generic onboarding class.""" def __init__(self): @@ -16,7 +16,7 @@ def run(self, onboarding_kwargs): raise NotImplementedError -class StandaloneOnboarding(Onboarding): # pylint: disable=too-few-public-methods +class StandaloneOnboarding(Onboarding): """Standalone onboarding class.""" def run(self, onboarding_kwargs): diff --git a/nautobot_device_onboarding/tests/test_onboarding.py b/nautobot_device_onboarding/tests/test_onboarding.py index d5167c0b..e2513064 100644 --- 
a/nautobot_device_onboarding/tests/test_onboarding.py +++ b/nautobot_device_onboarding/tests/test_onboarding.py @@ -15,7 +15,7 @@ PLUGIN_SETTINGS = settings.PLUGINS_CONFIG["nautobot_device_onboarding"] -class NapalmMock: # pylint: disable=too-few-public-methods +class NapalmMock: """Base napalm mock class for tests.""" def __init__(self, *args, **kwargs): @@ -67,7 +67,7 @@ def get_interfaces_ip(self): return {"Vlan100": {"ipv4": {"2.2.2.2": {"prefix_length": 32}}}} -class SSHDetectMock: # pylint: disable=too-few-public-methods +class SSHDetectMock: """SSHDetect mock class for tests.""" def __init__(self, *args, **kwargs): diff --git a/nautobot_device_onboarding/utils/formatter.py b/nautobot_device_onboarding/utils/formatter.py index 98d529fe..369fb700 100644 --- a/nautobot_device_onboarding/utils/formatter.py +++ b/nautobot_device_onboarding/utils/formatter.py @@ -99,6 +99,7 @@ def format_ob_data_junos(host, result): def normalize_interface_name(interface_name): + """Normalize interface names.""" for interface_abbreviation, interface_full in CISCO_INTERFACE_ABBREVIATIONS.items(): if interface_name.startswith(interface_abbreviation): interface_name = interface_name.replace(interface_abbreviation, interface_full, 1) @@ -107,18 +108,21 @@ def normalize_interface_name(interface_name): def normalize_interface_type(interface_type): + """Normalize interface types.""" if interface_type in CISCO_TO_NAUTOBOT_INTERFACE_TYPE: return CISCO_TO_NAUTOBOT_INTERFACE_TYPE[interface_type] return "other" def normalize_tagged_interface(tagged_interface): + """Normalize tagged interface types.""" if tagged_interface in TAGGED_INTERFACE_TYPES: return TAGGED_INTERFACE_TYPES[tagged_interface] return "" def format_ni_data_cisco_ios(command, command_result): + """Format cisco_ios data.""" all_results = {} # command = ["show version", "show interfaces", "show vlan", "show interfaces switchport"] for host_name, result in command_result.items(): diff --git 
a/nautobot_device_onboarding/utils/helper.py b/nautobot_device_onboarding/utils/helper.py index 8dc0580f..d5a86150 100644 --- a/nautobot_device_onboarding/utils/helper.py +++ b/nautobot_device_onboarding/utils/helper.py @@ -1,3 +1,5 @@ +"""helper.py.""" + from nautobot.dcim.filters import DeviceFilterSet from nautobot.dcim.models import Device from nornir_nautobot.exceptions import NornirNautobotException diff --git a/poetry.lock b/poetry.lock index 702597a5..80918504 100644 --- a/poetry.lock +++ b/poetry.lock @@ -113,6 +113,21 @@ tests = ["attrs[tests-no-zope]", "zope-interface"] tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] +[[package]] +name = "autopep8" +version = "2.0.0" +description = "A tool that automatically formats Python code to conform to the PEP 8 style guide" +optional = false +python-versions = "*" +files = [ + {file = "autopep8-2.0.0-py2.py3-none-any.whl", hash = "sha256:ad924b42c2e27a1ac58e432166cc4588f5b80747de02d0d35b1ecbd3e7d57207"}, + {file = "autopep8-2.0.0.tar.gz", hash = "sha256:8b1659c7f003e693199f52caffdc06585bb0716900bbc6a7442fd931d658c077"}, +] + +[package.dependencies] +pycodestyle = ">=2.9.1" +tomli = "*" + [[package]] name = "backports-zoneinfo" version = "0.2.1" @@ -220,33 +235,33 @@ files = [ [[package]] name = "black" -version = "24.1.1" +version = "24.2.0" description = "The uncompromising code formatter." 
optional = false python-versions = ">=3.8" files = [ - {file = "black-24.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2588021038bd5ada078de606f2a804cadd0a3cc6a79cb3e9bb3a8bf581325a4c"}, - {file = "black-24.1.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1a95915c98d6e32ca43809d46d932e2abc5f1f7d582ffbe65a5b4d1588af7445"}, - {file = "black-24.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fa6a0e965779c8f2afb286f9ef798df770ba2b6cee063c650b96adec22c056a"}, - {file = "black-24.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:5242ecd9e990aeb995b6d03dc3b2d112d4a78f2083e5a8e86d566340ae80fec4"}, - {file = "black-24.1.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fc1ec9aa6f4d98d022101e015261c056ddebe3da6a8ccfc2c792cbe0349d48b7"}, - {file = "black-24.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0269dfdea12442022e88043d2910429bed717b2d04523867a85dacce535916b8"}, - {file = "black-24.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3d64db762eae4a5ce04b6e3dd745dcca0fb9560eb931a5be97472e38652a161"}, - {file = "black-24.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:5d7b06ea8816cbd4becfe5f70accae953c53c0e53aa98730ceccb0395520ee5d"}, - {file = "black-24.1.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e2c8dfa14677f90d976f68e0c923947ae68fa3961d61ee30976c388adc0b02c8"}, - {file = "black-24.1.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a21725862d0e855ae05da1dd25e3825ed712eaaccef6b03017fe0853a01aa45e"}, - {file = "black-24.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:07204d078e25327aad9ed2c64790d681238686bce254c910de640c7cc4fc3aa6"}, - {file = "black-24.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:a83fe522d9698d8f9a101b860b1ee154c1d25f8a82ceb807d319f085b2627c5b"}, - {file = "black-24.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:08b34e85170d368c37ca7bf81cf67ac863c9d1963b2c1780c39102187ec8dd62"}, - {file = 
"black-24.1.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7258c27115c1e3b5de9ac6c4f9957e3ee2c02c0b39222a24dc7aa03ba0e986f5"}, - {file = "black-24.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40657e1b78212d582a0edecafef133cf1dd02e6677f539b669db4746150d38f6"}, - {file = "black-24.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:e298d588744efda02379521a19639ebcd314fba7a49be22136204d7ed1782717"}, - {file = "black-24.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:34afe9da5056aa123b8bfda1664bfe6fb4e9c6f311d8e4a6eb089da9a9173bf9"}, - {file = "black-24.1.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:854c06fb86fd854140f37fb24dbf10621f5dab9e3b0c29a690ba595e3d543024"}, - {file = "black-24.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3897ae5a21ca132efa219c029cce5e6bfc9c3d34ed7e892113d199c0b1b444a2"}, - {file = "black-24.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:ecba2a15dfb2d97105be74bbfe5128bc5e9fa8477d8c46766505c1dda5883aac"}, - {file = "black-24.1.1-py3-none-any.whl", hash = "sha256:5cdc2e2195212208fbcae579b931407c1fa9997584f0a415421748aeafff1168"}, - {file = "black-24.1.1.tar.gz", hash = "sha256:48b5760dcbfe5cf97fd4fba23946681f3a81514c6ab8a45b50da67ac8fbc6c7b"}, + {file = "black-24.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6981eae48b3b33399c8757036c7f5d48a535b962a7c2310d19361edeef64ce29"}, + {file = "black-24.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d533d5e3259720fdbc1b37444491b024003e012c5173f7d06825a77508085430"}, + {file = "black-24.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61a0391772490ddfb8a693c067df1ef5227257e72b0e4108482b8d41b5aee13f"}, + {file = "black-24.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:992e451b04667116680cb88f63449267c13e1ad134f30087dec8527242e9862a"}, + {file = "black-24.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:163baf4ef40e6897a2a9b83890e59141cc8c2a98f2dda5080dc15c00ee1e62cd"}, + {file = 
"black-24.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e37c99f89929af50ffaf912454b3e3b47fd64109659026b678c091a4cd450fb2"}, + {file = "black-24.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9de21bafcba9683853f6c96c2d515e364aee631b178eaa5145fc1c61a3cc92"}, + {file = "black-24.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:9db528bccb9e8e20c08e716b3b09c6bdd64da0dd129b11e160bf082d4642ac23"}, + {file = "black-24.2.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d84f29eb3ee44859052073b7636533ec995bd0f64e2fb43aeceefc70090e752b"}, + {file = "black-24.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1e08fb9a15c914b81dd734ddd7fb10513016e5ce7e6704bdd5e1251ceee51ac9"}, + {file = "black-24.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:810d445ae6069ce64030c78ff6127cd9cd178a9ac3361435708b907d8a04c693"}, + {file = "black-24.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:ba15742a13de85e9b8f3239c8f807723991fbfae24bad92d34a2b12e81904982"}, + {file = "black-24.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7e53a8c630f71db01b28cd9602a1ada68c937cbf2c333e6ed041390d6968faf4"}, + {file = "black-24.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:93601c2deb321b4bad8f95df408e3fb3943d85012dddb6121336b8e24a0d1218"}, + {file = "black-24.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0057f800de6acc4407fe75bb147b0c2b5cbb7c3ed110d3e5999cd01184d53b0"}, + {file = "black-24.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:faf2ee02e6612577ba0181f4347bcbcf591eb122f7841ae5ba233d12c39dcb4d"}, + {file = "black-24.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:057c3dc602eaa6fdc451069bd027a1b2635028b575a6c3acfd63193ced20d9c8"}, + {file = "black-24.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:08654d0797e65f2423f850fc8e16a0ce50925f9337fb4a4a176a7aa4026e63f8"}, + {file = "black-24.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:ca610d29415ee1a30a3f30fab7a8f4144e9d34c89a235d81292a1edb2b55f540"}, + {file = "black-24.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:4dd76e9468d5536abd40ffbc7a247f83b2324f0c050556d9c371c2b9a9a95e31"}, + {file = "black-24.2.0-py3-none-any.whl", hash = "sha256:e8a6ae970537e67830776488bca52000eaa37fa63b9988e8c487458d9cd5ace6"}, + {file = "black-24.2.0.tar.gz", hash = "sha256:bce4f25c27c3435e4dace4815bcb2008b87e167e3bf4ee47ccdc5ce906eb4894"}, ] [package.dependencies] @@ -322,13 +337,13 @@ zstd = ["zstandard (==0.22.0)"] [[package]] name = "certifi" -version = "2023.11.17" +version = "2024.2.2" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2023.11.17-py3-none-any.whl", hash = "sha256:e036ab49d5b79556f99cfc2d9320b34cfbe5be05c5871b51de9329f0603b0474"}, - {file = "certifi-2023.11.17.tar.gz", hash = "sha256:9b469f3a900bf28dc19b8cfbf8019bf47f7fdd1a65a1d4ffb98fc14166beb4d1"}, + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, ] [[package]] @@ -648,43 +663,43 @@ dev = ["polib"] [[package]] name = "cryptography" -version = "42.0.1" +version = "42.0.2" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
optional = false python-versions = ">=3.7" files = [ - {file = "cryptography-42.0.1-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:265bdc693570b895eb641410b8fc9e8ddbce723a669236162b9d9cfb70bd8d77"}, - {file = "cryptography-42.0.1-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:160fa08dfa6dca9cb8ad9bd84e080c0db6414ba5ad9a7470bc60fb154f60111e"}, - {file = "cryptography-42.0.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:727387886c9c8de927c360a396c5edcb9340d9e960cda145fca75bdafdabd24c"}, - {file = "cryptography-42.0.1-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d84673c012aa698555d4710dcfe5f8a0ad76ea9dde8ef803128cc669640a2e0"}, - {file = "cryptography-42.0.1-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:e6edc3a568667daf7d349d7e820783426ee4f1c0feab86c29bd1d6fe2755e009"}, - {file = "cryptography-42.0.1-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:d50718dd574a49d3ef3f7ef7ece66ef281b527951eb2267ce570425459f6a404"}, - {file = "cryptography-42.0.1-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:9544492e8024f29919eac2117edd8c950165e74eb551a22c53f6fdf6ba5f4cb8"}, - {file = "cryptography-42.0.1-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:ab6b302d51fbb1dd339abc6f139a480de14d49d50f65fdc7dff782aa8631d035"}, - {file = "cryptography-42.0.1-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:2fe16624637d6e3e765530bc55caa786ff2cbca67371d306e5d0a72e7c3d0407"}, - {file = "cryptography-42.0.1-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:ed1b2130f5456a09a134cc505a17fc2830a1a48ed53efd37dcc904a23d7b82fa"}, - {file = "cryptography-42.0.1-cp37-abi3-win32.whl", hash = "sha256:e5edf189431b4d51f5c6fb4a95084a75cef6b4646c934eb6e32304fc720e1453"}, - {file = "cryptography-42.0.1-cp37-abi3-win_amd64.whl", hash = "sha256:6bfd823b336fdcd8e06285ae8883d3d2624d3bdef312a0e2ef905f332f8e9302"}, - {file = "cryptography-42.0.1-cp39-abi3-macosx_10_12_universal2.whl", hash = 
"sha256:351db02c1938c8e6b1fee8a78d6b15c5ccceca7a36b5ce48390479143da3b411"}, - {file = "cryptography-42.0.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:430100abed6d3652208ae1dd410c8396213baee2e01a003a4449357db7dc9e14"}, - {file = "cryptography-42.0.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2dff7a32880a51321f5de7869ac9dde6b1fca00fc1fef89d60e93f215468e824"}, - {file = "cryptography-42.0.1-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:b512f33c6ab195852595187af5440d01bb5f8dd57cb7a91e1e009a17f1b7ebca"}, - {file = "cryptography-42.0.1-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:95d900d19a370ae36087cc728e6e7be9c964ffd8cbcb517fd1efb9c9284a6abc"}, - {file = "cryptography-42.0.1-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:6ac8924085ed8287545cba89dc472fc224c10cc634cdf2c3e2866fe868108e77"}, - {file = "cryptography-42.0.1-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:cb2861a9364fa27d24832c718150fdbf9ce6781d7dc246a516435f57cfa31fe7"}, - {file = "cryptography-42.0.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:25ec6e9e81de5d39f111a4114193dbd39167cc4bbd31c30471cebedc2a92c323"}, - {file = "cryptography-42.0.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:9d61fcdf37647765086030d81872488e4cb3fafe1d2dda1d487875c3709c0a49"}, - {file = "cryptography-42.0.1-cp39-abi3-win32.whl", hash = "sha256:16b9260d04a0bfc8952b00335ff54f471309d3eb9d7e8dbfe9b0bd9e26e67881"}, - {file = "cryptography-42.0.1-cp39-abi3-win_amd64.whl", hash = "sha256:7911586fc69d06cd0ab3f874a169433db1bc2f0e40988661408ac06c4527a986"}, - {file = "cryptography-42.0.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:d3594947d2507d4ef7a180a7f49a6db41f75fb874c2fd0e94f36b89bfd678bf2"}, - {file = "cryptography-42.0.1-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:8d7efb6bf427d2add2f40b6e1e8e476c17508fa8907234775214b153e69c2e11"}, - {file = 
"cryptography-42.0.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:126e0ba3cc754b200a2fb88f67d66de0d9b9e94070c5bc548318c8dab6383cb6"}, - {file = "cryptography-42.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:802d6f83233cf9696b59b09eb067e6b4d5ae40942feeb8e13b213c8fad47f1aa"}, - {file = "cryptography-42.0.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:0b7cacc142260ada944de070ce810c3e2a438963ee3deb45aa26fd2cee94c9a4"}, - {file = "cryptography-42.0.1-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:32ea63ceeae870f1a62e87f9727359174089f7b4b01e4999750827bf10e15d60"}, - {file = "cryptography-42.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:d3902c779a92151f134f68e555dd0b17c658e13429f270d8a847399b99235a3f"}, - {file = "cryptography-42.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:50aecd93676bcca78379604ed664c45da82bc1241ffb6f97f6b7392ed5bc6f04"}, - {file = "cryptography-42.0.1.tar.gz", hash = "sha256:fd33f53809bb363cf126bebe7a99d97735988d9b0131a2be59fbf83e1259a5b7"}, + {file = "cryptography-42.0.2-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:701171f825dcab90969596ce2af253143b93b08f1a716d4b2a9d2db5084ef7be"}, + {file = "cryptography-42.0.2-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:61321672b3ac7aade25c40449ccedbc6db72c7f5f0fdf34def5e2f8b51ca530d"}, + {file = "cryptography-42.0.2-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea2c3ffb662fec8bbbfce5602e2c159ff097a4631d96235fcf0fb00e59e3ece4"}, + {file = "cryptography-42.0.2-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b15c678f27d66d247132cbf13df2f75255627bcc9b6a570f7d2fd08e8c081d2"}, + {file = "cryptography-42.0.2-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:8e88bb9eafbf6a4014d55fb222e7360eef53e613215085e65a13290577394529"}, + {file = "cryptography-42.0.2-cp37-abi3-manylinux_2_28_x86_64.whl", hash = 
"sha256:a047682d324ba56e61b7ea7c7299d51e61fd3bca7dad2ccc39b72bd0118d60a1"}, + {file = "cryptography-42.0.2-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:36d4b7c4be6411f58f60d9ce555a73df8406d484ba12a63549c88bd64f7967f1"}, + {file = "cryptography-42.0.2-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:a00aee5d1b6c20620161984f8ab2ab69134466c51f58c052c11b076715e72929"}, + {file = "cryptography-42.0.2-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:b97fe7d7991c25e6a31e5d5e795986b18fbbb3107b873d5f3ae6dc9a103278e9"}, + {file = "cryptography-42.0.2-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:5fa82a26f92871eca593b53359c12ad7949772462f887c35edaf36f87953c0e2"}, + {file = "cryptography-42.0.2-cp37-abi3-win32.whl", hash = "sha256:4b063d3413f853e056161eb0c7724822a9740ad3caa24b8424d776cebf98e7ee"}, + {file = "cryptography-42.0.2-cp37-abi3-win_amd64.whl", hash = "sha256:841ec8af7a8491ac76ec5a9522226e287187a3107e12b7d686ad354bb78facee"}, + {file = "cryptography-42.0.2-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:55d1580e2d7e17f45d19d3b12098e352f3a37fe86d380bf45846ef257054b242"}, + {file = "cryptography-42.0.2-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28cb2c41f131a5758d6ba6a0504150d644054fd9f3203a1e8e8d7ac3aea7f73a"}, + {file = "cryptography-42.0.2-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9097a208875fc7bbeb1286d0125d90bdfed961f61f214d3f5be62cd4ed8a446"}, + {file = "cryptography-42.0.2-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:44c95c0e96b3cb628e8452ec060413a49002a247b2b9938989e23a2c8291fc90"}, + {file = "cryptography-42.0.2-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:2f9f14185962e6a04ab32d1abe34eae8a9001569ee4edb64d2304bf0d65c53f3"}, + {file = "cryptography-42.0.2-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:09a77e5b2e8ca732a19a90c5bca2d124621a1edb5438c5daa2d2738bfeb02589"}, + {file = "cryptography-42.0.2-cp39-abi3-musllinux_1_1_x86_64.whl", hash = 
"sha256:ad28cff53f60d99a928dfcf1e861e0b2ceb2bc1f08a074fdd601b314e1cc9e0a"}, + {file = "cryptography-42.0.2-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:130c0f77022b2b9c99d8cebcdd834d81705f61c68e91ddd614ce74c657f8b3ea"}, + {file = "cryptography-42.0.2-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:fa3dec4ba8fb6e662770b74f62f1a0c7d4e37e25b58b2bf2c1be4c95372b4a33"}, + {file = "cryptography-42.0.2-cp39-abi3-win32.whl", hash = "sha256:3dbd37e14ce795b4af61b89b037d4bc157f2cb23e676fa16932185a04dfbf635"}, + {file = "cryptography-42.0.2-cp39-abi3-win_amd64.whl", hash = "sha256:8a06641fb07d4e8f6c7dda4fc3f8871d327803ab6542e33831c7ccfdcb4d0ad6"}, + {file = "cryptography-42.0.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:087887e55e0b9c8724cf05361357875adb5c20dec27e5816b653492980d20380"}, + {file = "cryptography-42.0.2-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:a7ef8dd0bf2e1d0a27042b231a3baac6883cdd5557036f5e8df7139255feaac6"}, + {file = "cryptography-42.0.2-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:4383b47f45b14459cab66048d384614019965ba6c1a1a141f11b5a551cace1b2"}, + {file = "cryptography-42.0.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:fbeb725c9dc799a574518109336acccaf1303c30d45c075c665c0793c2f79a7f"}, + {file = "cryptography-42.0.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:320948ab49883557a256eab46149df79435a22d2fefd6a66fe6946f1b9d9d008"}, + {file = "cryptography-42.0.2-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:5ef9bc3d046ce83c4bbf4c25e1e0547b9c441c01d30922d812e887dc5f125c12"}, + {file = "cryptography-42.0.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:52ed9ebf8ac602385126c9a2fe951db36f2cb0c2538d22971487f89d0de4065a"}, + {file = "cryptography-42.0.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:141e2aa5ba100d3788c0ad7919b288f89d1fe015878b9659b307c9ef867d3a65"}, + {file = "cryptography-42.0.2.tar.gz", hash = 
"sha256:e0ec52ba3c7f1b7d813cd52649a5b3ef1fc0d433219dc8c93827c57eab6cf888"}, ] [package.dependencies] @@ -749,13 +764,13 @@ profile = ["gprof2dot (>=2022.7.29)"] [[package]] name = "django" -version = "3.2.23" +version = "3.2.24" description = "A high-level Python Web framework that encourages rapid development and clean, pragmatic design." optional = false python-versions = ">=3.6" files = [ - {file = "Django-3.2.23-py3-none-any.whl", hash = "sha256:d48608d5f62f2c1e260986835db089fa3b79d6f58510881d316b8d88345ae6e1"}, - {file = "Django-3.2.23.tar.gz", hash = "sha256:82968f3640e29ef4a773af2c28448f5f7a08d001c6ac05b32d02aeee6509508b"}, + {file = "Django-3.2.24-py3-none-any.whl", hash = "sha256:5dd5b787c3ba39637610fe700f54bf158e33560ea0dba600c19921e7ff926ec5"}, + {file = "Django-3.2.24.tar.gz", hash = "sha256:aaee9fb0fb4ebd4311520887ad2e33313d368846607f82a9a0ed461cd4c35b18"}, ] [package.dependencies] @@ -846,12 +861,13 @@ Django = ">=3.2" [[package]] name = "django-db-file-storage" -version = "0.5.5" +version = "0.5.6.1" description = "Custom FILE_STORAGE for Django. Saves files in your database instead of your file system." optional = false python-versions = "*" files = [ - {file = "django-db-file-storage-0.5.5.tar.gz", hash = "sha256:5d5da694b78ab202accab4508b958e0e37b3d146310e76f6f6125e1bdeaaad14"}, + {file = "django-db-file-storage-0.5.6.1.tar.gz", hash = "sha256:f0c4540ed6b772e8b3141eae3222acde4c29ab771477a5c999013a3980856c7f"}, + {file = "django_db_file_storage-0.5.6.1-py3-none-any.whl", hash = "sha256:3feac1e060b550c3c03c35e95d2111d9f100bc247863ace691a78b107f1fc3d5"}, ] [package.dependencies] @@ -859,13 +875,13 @@ Django = "*" [[package]] name = "django-debug-toolbar" -version = "4.2.0" +version = "4.3.0" description = "A configurable set of panels that display various debug information about the current request/response." 
optional = false python-versions = ">=3.8" files = [ - {file = "django_debug_toolbar-4.2.0-py3-none-any.whl", hash = "sha256:af99128c06e8e794479e65ab62cc6c7d1e74e1c19beb44dcbf9bad7a9c017327"}, - {file = "django_debug_toolbar-4.2.0.tar.gz", hash = "sha256:bc7fdaafafcdedefcc67a4a5ad9dac96efd6e41db15bc74d402a54a2ba4854dc"}, + {file = "django_debug_toolbar-4.3.0-py3-none-any.whl", hash = "sha256:e09b7dcb8417b743234dfc57c95a7c1d1d87a88844abd13b4c5387f807b31bf6"}, + {file = "django_debug_toolbar-4.3.0.tar.gz", hash = "sha256:0b0dddee5ea29b9cb678593bc0d7a6d76b21d7799cb68e091a2148341a80f3c4"}, ] [package.dependencies] @@ -982,6 +998,23 @@ redis = ">=3,<4.0.0 || >4.0.0,<4.0.1 || >4.0.1" [package.extras] hiredis = ["redis[hiredis] (>=3,!=4.0.0,!=4.0.1)"] +[[package]] +name = "django-silk" +version = "5.1.0" +description = "Silky smooth profiling for the Django Framework" +optional = false +python-versions = ">=3.8" +files = [ + {file = "django-silk-5.1.0.tar.gz", hash = "sha256:34abb5852315f0f3303d45b7ab4a2caa9cf670102b614dbb2ac40a5d2d5cbffb"}, + {file = "django_silk-5.1.0-py3-none-any.whl", hash = "sha256:35a2051672b0be86af4ce734a0df0b6674c8c63f2df730b3756ec6e52923707d"}, +] + +[package.dependencies] +autopep8 = "*" +Django = ">=3.2" +gprof2dot = ">=2017.09.19" +sqlparse = "*" + [[package]] name = "django-tables2" version = "2.6.0" @@ -1119,13 +1152,13 @@ sidecar = ["drf-spectacular-sidecar"] [[package]] name = "drf-spectacular-sidecar" -version = "2024.1.1" +version = "2024.2.1" description = "Serve self-contained distribution builds of Swagger UI and Redoc with Django" optional = false python-versions = ">=3.6" files = [ - {file = "drf-spectacular-sidecar-2024.1.1.tar.gz", hash = "sha256:099ec58b6af6a90e851a9329b12a57aa1ee7daa6cef62fb504f2ed302f10da76"}, - {file = "drf_spectacular_sidecar-2024.1.1-py3-none-any.whl", hash = "sha256:4b9e33b4dcfa43f84e3db2659d31766a018a2b98b02d8856d9cd69580a4911c9"}, + {file = "drf-spectacular-sidecar-2024.2.1.tar.gz", hash = 
"sha256:db95a38971c9be09986356f82041fac60183d28ebdf60c0c51eb8c1f86da3937"}, + {file = "drf_spectacular_sidecar-2024.2.1-py3-none-any.whl", hash = "sha256:dc819ef7a35448c18b2bf4273b38fe1468e14daea5fc8675afb5d0f9e6d9a0ba"}, ] [package.dependencies] @@ -1233,6 +1266,17 @@ gitdb = ">=4.0.1,<5" [package.extras] test = ["black", "coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mock", "mypy", "pre-commit", "pytest (>=7.3.1)", "pytest-cov", "pytest-instafail", "pytest-mock", "pytest-sugar", "sumtypes"] +[[package]] +name = "gprof2dot" +version = "2022.7.29" +description = "Generate a dot graph from the output of several profilers." +optional = false +python-versions = ">=2.7" +files = [ + {file = "gprof2dot-2022.7.29-py2.py3-none-any.whl", hash = "sha256:f165b3851d3c52ee4915eb1bd6cca571e5759823c2cd0f71a79bda93c2dc85d6"}, + {file = "gprof2dot-2022.7.29.tar.gz", hash = "sha256:45b4d298bd36608fccf9511c3fd88a773f7a1abc04d6cd39445b11ba43133ec5"}, +] + [[package]] name = "graphene" version = "2.1.9" @@ -1327,13 +1371,13 @@ six = ">=1.12" [[package]] name = "griffe" -version = "0.39.1" +version = "0.40.1" description = "Signatures for entire Python programs. Extract the structure, the frame, the skeleton of your project, to generate API documentation or find breaking changes in your API." 
optional = false python-versions = ">=3.8" files = [ - {file = "griffe-0.39.1-py3-none-any.whl", hash = "sha256:6ce4ecffcf0d2f96362c5974b3f7df812da8f8d4cfcc5ebc8202ef72656fc087"}, - {file = "griffe-0.39.1.tar.gz", hash = "sha256:ead8dfede6e6531cce6bf69090a4f3c6d36fdf923c43f8e85aa530552cef0c09"}, + {file = "griffe-0.40.1-py3-none-any.whl", hash = "sha256:5b8c023f366fe273e762131fe4bfd141ea56c09b3cb825aa92d06a82681cfd93"}, + {file = "griffe-0.40.1.tar.gz", hash = "sha256:66c48a62e2ce5784b6940e603300fcfb807b6f099b94e7f753f1841661fd5c7c"}, ] [package.dependencies] @@ -1773,71 +1817,71 @@ testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] [[package]] name = "markupsafe" -version = "2.1.4" +version = "2.1.5" description = "Safely add untrusted strings to HTML/XML markup." optional = false python-versions = ">=3.7" files = [ - {file = "MarkupSafe-2.1.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:de8153a7aae3835484ac168a9a9bdaa0c5eee4e0bc595503c95d53b942879c84"}, - {file = "MarkupSafe-2.1.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e888ff76ceb39601c59e219f281466c6d7e66bd375b4ec1ce83bcdc68306796b"}, - {file = "MarkupSafe-2.1.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0b838c37ba596fcbfca71651a104a611543077156cb0a26fe0c475e1f152ee8"}, - {file = "MarkupSafe-2.1.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dac1ebf6983148b45b5fa48593950f90ed6d1d26300604f321c74a9ca1609f8e"}, - {file = "MarkupSafe-2.1.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0fbad3d346df8f9d72622ac71b69565e621ada2ce6572f37c2eae8dacd60385d"}, - {file = "MarkupSafe-2.1.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d5291d98cd3ad9a562883468c690a2a238c4a6388ab3bd155b0c75dd55ece858"}, - {file = "MarkupSafe-2.1.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a7cc49ef48a3c7a0005a949f3c04f8baa5409d3f663a1b36f0eba9bfe2a0396e"}, - 
{file = "MarkupSafe-2.1.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b83041cda633871572f0d3c41dddd5582ad7d22f65a72eacd8d3d6d00291df26"}, - {file = "MarkupSafe-2.1.4-cp310-cp310-win32.whl", hash = "sha256:0c26f67b3fe27302d3a412b85ef696792c4a2386293c53ba683a89562f9399b0"}, - {file = "MarkupSafe-2.1.4-cp310-cp310-win_amd64.whl", hash = "sha256:a76055d5cb1c23485d7ddae533229039b850db711c554a12ea64a0fd8a0129e2"}, - {file = "MarkupSafe-2.1.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9e9e3c4020aa2dc62d5dd6743a69e399ce3de58320522948af6140ac959ab863"}, - {file = "MarkupSafe-2.1.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0042d6a9880b38e1dd9ff83146cc3c9c18a059b9360ceae207805567aacccc69"}, - {file = "MarkupSafe-2.1.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55d03fea4c4e9fd0ad75dc2e7e2b6757b80c152c032ea1d1de487461d8140efc"}, - {file = "MarkupSafe-2.1.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ab3a886a237f6e9c9f4f7d272067e712cdb4efa774bef494dccad08f39d8ae6"}, - {file = "MarkupSafe-2.1.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abf5ebbec056817057bfafc0445916bb688a255a5146f900445d081db08cbabb"}, - {file = "MarkupSafe-2.1.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e1a0d1924a5013d4f294087e00024ad25668234569289650929ab871231668e7"}, - {file = "MarkupSafe-2.1.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e7902211afd0af05fbadcc9a312e4cf10f27b779cf1323e78d52377ae4b72bea"}, - {file = "MarkupSafe-2.1.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c669391319973e49a7c6230c218a1e3044710bc1ce4c8e6eb71f7e6d43a2c131"}, - {file = "MarkupSafe-2.1.4-cp311-cp311-win32.whl", hash = "sha256:31f57d64c336b8ccb1966d156932f3daa4fee74176b0fdc48ef580be774aae74"}, - {file = "MarkupSafe-2.1.4-cp311-cp311-win_amd64.whl", hash = "sha256:54a7e1380dfece8847c71bf7e33da5d084e9b889c75eca19100ef98027bd9f56"}, - 
{file = "MarkupSafe-2.1.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:a76cd37d229fc385738bd1ce4cba2a121cf26b53864c1772694ad0ad348e509e"}, - {file = "MarkupSafe-2.1.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:987d13fe1d23e12a66ca2073b8d2e2a75cec2ecb8eab43ff5624ba0ad42764bc"}, - {file = "MarkupSafe-2.1.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5244324676254697fe5c181fc762284e2c5fceeb1c4e3e7f6aca2b6f107e60dc"}, - {file = "MarkupSafe-2.1.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78bc995e004681246e85e28e068111a4c3f35f34e6c62da1471e844ee1446250"}, - {file = "MarkupSafe-2.1.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a4d176cfdfde84f732c4a53109b293d05883e952bbba68b857ae446fa3119b4f"}, - {file = "MarkupSafe-2.1.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:f9917691f410a2e0897d1ef99619fd3f7dd503647c8ff2475bf90c3cf222ad74"}, - {file = "MarkupSafe-2.1.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:f06e5a9e99b7df44640767842f414ed5d7bedaaa78cd817ce04bbd6fd86e2dd6"}, - {file = "MarkupSafe-2.1.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:396549cea79e8ca4ba65525470d534e8a41070e6b3500ce2414921099cb73e8d"}, - {file = "MarkupSafe-2.1.4-cp312-cp312-win32.whl", hash = "sha256:f6be2d708a9d0e9b0054856f07ac7070fbe1754be40ca8525d5adccdbda8f475"}, - {file = "MarkupSafe-2.1.4-cp312-cp312-win_amd64.whl", hash = "sha256:5045e892cfdaecc5b4c01822f353cf2c8feb88a6ec1c0adef2a2e705eef0f656"}, - {file = "MarkupSafe-2.1.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7a07f40ef8f0fbc5ef1000d0c78771f4d5ca03b4953fc162749772916b298fc4"}, - {file = "MarkupSafe-2.1.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d18b66fe626ac412d96c2ab536306c736c66cf2a31c243a45025156cc190dc8a"}, - {file = "MarkupSafe-2.1.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:698e84142f3f884114ea8cf83e7a67ca8f4ace8454e78fe960646c6c91c63bfa"}, - {file = "MarkupSafe-2.1.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:49a3b78a5af63ec10d8604180380c13dcd870aba7928c1fe04e881d5c792dc4e"}, - {file = "MarkupSafe-2.1.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:15866d7f2dc60cfdde12ebb4e75e41be862348b4728300c36cdf405e258415ec"}, - {file = "MarkupSafe-2.1.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:6aa5e2e7fc9bc042ae82d8b79d795b9a62bd8f15ba1e7594e3db243f158b5565"}, - {file = "MarkupSafe-2.1.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:54635102ba3cf5da26eb6f96c4b8c53af8a9c0d97b64bdcb592596a6255d8518"}, - {file = "MarkupSafe-2.1.4-cp37-cp37m-win32.whl", hash = "sha256:3583a3a3ab7958e354dc1d25be74aee6228938312ee875a22330c4dc2e41beb0"}, - {file = "MarkupSafe-2.1.4-cp37-cp37m-win_amd64.whl", hash = "sha256:d6e427c7378c7f1b2bef6a344c925b8b63623d3321c09a237b7cc0e77dd98ceb"}, - {file = "MarkupSafe-2.1.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:bf1196dcc239e608605b716e7b166eb5faf4bc192f8a44b81e85251e62584bd2"}, - {file = "MarkupSafe-2.1.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4df98d4a9cd6a88d6a585852f56f2155c9cdb6aec78361a19f938810aa020954"}, - {file = "MarkupSafe-2.1.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b835aba863195269ea358cecc21b400276747cc977492319fd7682b8cd2c253d"}, - {file = "MarkupSafe-2.1.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23984d1bdae01bee794267424af55eef4dfc038dc5d1272860669b2aa025c9e3"}, - {file = "MarkupSafe-2.1.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c98c33ffe20e9a489145d97070a435ea0679fddaabcafe19982fe9c971987d5"}, - {file = "MarkupSafe-2.1.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9896fca4a8eb246defc8b2a7ac77ef7553b638e04fbf170bff78a40fa8a91474"}, - {file = 
"MarkupSafe-2.1.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:b0fe73bac2fed83839dbdbe6da84ae2a31c11cfc1c777a40dbd8ac8a6ed1560f"}, - {file = "MarkupSafe-2.1.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c7556bafeaa0a50e2fe7dc86e0382dea349ebcad8f010d5a7dc6ba568eaaa789"}, - {file = "MarkupSafe-2.1.4-cp38-cp38-win32.whl", hash = "sha256:fc1a75aa8f11b87910ffd98de62b29d6520b6d6e8a3de69a70ca34dea85d2a8a"}, - {file = "MarkupSafe-2.1.4-cp38-cp38-win_amd64.whl", hash = "sha256:3a66c36a3864df95e4f62f9167c734b3b1192cb0851b43d7cc08040c074c6279"}, - {file = "MarkupSafe-2.1.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:765f036a3d00395a326df2835d8f86b637dbaf9832f90f5d196c3b8a7a5080cb"}, - {file = "MarkupSafe-2.1.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:21e7af8091007bf4bebf4521184f4880a6acab8df0df52ef9e513d8e5db23411"}, - {file = "MarkupSafe-2.1.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5c31fe855c77cad679b302aabc42d724ed87c043b1432d457f4976add1c2c3e"}, - {file = "MarkupSafe-2.1.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7653fa39578957bc42e5ebc15cf4361d9e0ee4b702d7d5ec96cdac860953c5b4"}, - {file = "MarkupSafe-2.1.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:47bb5f0142b8b64ed1399b6b60f700a580335c8e1c57f2f15587bd072012decc"}, - {file = "MarkupSafe-2.1.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:fe8512ed897d5daf089e5bd010c3dc03bb1bdae00b35588c49b98268d4a01e00"}, - {file = "MarkupSafe-2.1.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:36d7626a8cca4d34216875aee5a1d3d654bb3dac201c1c003d182283e3205949"}, - {file = "MarkupSafe-2.1.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b6f14a9cd50c3cb100eb94b3273131c80d102e19bb20253ac7bd7336118a673a"}, - {file = "MarkupSafe-2.1.4-cp39-cp39-win32.whl", hash = "sha256:c8f253a84dbd2c63c19590fa86a032ef3d8cc18923b8049d91bcdeeb2581fbf6"}, - {file = 
"MarkupSafe-2.1.4-cp39-cp39-win_amd64.whl", hash = "sha256:8b570a1537367b52396e53325769608f2a687ec9a4363647af1cded8928af959"}, - {file = "MarkupSafe-2.1.4.tar.gz", hash = "sha256:3aae9af4cac263007fd6309c64c6ab4506dd2b79382d9d19a1994f9240b8db4f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = 
"MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, + {file 
= "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, + {file = 
"MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, ] [[package]] @@ -2049,18 +2093,18 @@ typing-extensions = ">=4.3.0" [[package]] name = "nautobot" -version = "2.1.2" +version = "2.1.4" description = "Source of truth and network automation platform." 
optional = false python-versions = ">=3.8,<3.12" files = [ - {file = "nautobot-2.1.2-py3-none-any.whl", hash = "sha256:13fffb9ff7bf6dbee0df492256bc37060bea4229d71461b0b7447839bc35873a"}, - {file = "nautobot-2.1.2.tar.gz", hash = "sha256:185c1a1556c77f6ed5f2c9ed82aeea1f2b385b0ea2ceb480c78a6dbec8ef07d1"}, + {file = "nautobot-2.1.4-py3-none-any.whl", hash = "sha256:b1311cb8bda428ee1b5b7074ce75ef99aaffd31a29207a69339fa92cea2de729"}, + {file = "nautobot-2.1.4.tar.gz", hash = "sha256:50e64ba399485631fc694c489b3b47a3c300f7914f8856cff2819d076474245b"}, ] [package.dependencies] celery = ">=5.3.1,<5.4.0" -Django = ">=3.2.23,<3.3.0" +Django = ">=3.2.24,<3.3.0" django-ajax-tables = ">=1.1.1,<1.2.0" django-celery-beat = ">=2.5.0,<2.6.0" django-celery-results = ">=2.4.0,<2.5.0" @@ -2073,6 +2117,7 @@ django-health-check = ">=3.17.0,<3.18.0" django-jinja = ">=2.10.2,<2.11.0" django-prometheus = ">=2.3.1,<2.4.0" django-redis = ">=5.3.0,<5.4.0" +django-silk = ">=5.1.0,<5.2.0" django-tables2 = ">=2.6.0,<2.7.0" django-taggit = ">=4.0.0,<4.1.0" django-timezone-field = ">=5.1,<5.2" @@ -2088,24 +2133,24 @@ graphene-django-optimizer = ">=0.8.0,<0.9.0" Jinja2 = ">=3.1.3,<3.2.0" jsonschema = ">=4.7.0,<4.19.0" Markdown = ">=3.3.7,<3.4.0" -MarkupSafe = ">=2.1.3,<2.2.0" +MarkupSafe = ">=2.1.5,<2.2.0" netaddr = ">=0.8.0,<0.9.0" netutils = ">=1.6.0,<2.0.0" nh3 = ">=0.2.15,<0.3.0" packaging = ">=23.1" -Pillow = ">=10.0.0,<10.1.0" +Pillow = ">=10.2.0,<10.3.0" prometheus-client = ">=0.17.1,<0.18.0" psycopg2-binary = ">=2.9.9,<2.10.0" -python-slugify = ">=8.0.1,<8.1.0" -pyuwsgi = ">=2.0.21,<2.1.0" +python-slugify = ">=8.0.3,<8.1.0" +pyuwsgi = ">=2.0.23,<2.1.0" PyYAML = ">=6.0,<6.1" social-auth-app-django = ">=5.2.0,<5.3.0" svgwrite = ">=1.4.2,<1.5.0" [package.extras] -all = ["django-auth-ldap (>=4.3.0,<4.4.0)", "django-storages (>=1.13.2,<1.14.0)", "mysqlclient (>=2.2.0,<2.3.0)", "napalm (>=4.1.0,<4.2.0)", "social-auth-core[openidconnect,saml] (>=4.4.2,<4.5.0)"] +all = ["django-auth-ldap 
(>=4.3.0,<4.4.0)", "django-storages (>=1.13.2,<1.14.0)", "mysqlclient (>=2.2.3,<2.3.0)", "napalm (>=4.1.0,<4.2.0)", "social-auth-core[openidconnect,saml] (>=4.4.2,<4.5.0)"] ldap = ["django-auth-ldap (>=4.3.0,<4.4.0)"] -mysql = ["mysqlclient (>=2.2.0,<2.3.0)"] +mysql = ["mysqlclient (>=2.2.3,<2.3.0)"] napalm = ["napalm (>=4.1.0,<4.2.0)"] remote-storage = ["django-storages (>=1.13.2,<1.14.0)"] sso = ["social-auth-core[openidconnect,saml] (>=4.4.2,<4.5.0)"] @@ -2291,13 +2336,13 @@ nornir = ">=3,<4" [[package]] name = "nornir-nautobot" -version = "3.1.0" +version = "3.1.1" description = "Nornir Nautobot" optional = false python-versions = ">=3.8,<4.0" files = [ - {file = "nornir_nautobot-3.1.0-py3-none-any.whl", hash = "sha256:23197181c17fa6de503679490d04fdc7315133ec5ddc9b549eb0794af9da418f"}, - {file = "nornir_nautobot-3.1.0.tar.gz", hash = "sha256:5bc58d83650fb87aec456358205d455aaa5289345e2bc18f32d6bfa421eec63c"}, + {file = "nornir_nautobot-3.1.1-py3-none-any.whl", hash = "sha256:2a21d134ddcedcf2344e5e0d825fb9ab4f32d913294679fe9cfe1eeb19272256"}, + {file = "nornir_nautobot-3.1.1.tar.gz", hash = "sha256:c36ff2d8626131b91d0bdb24967782d39db58e2be3b011b8be5d30e741562556"}, ] [package.dependencies] @@ -2308,7 +2353,7 @@ nornir-jinja2 = ">=0.2.0,<0.3.0" nornir-napalm = ">=0.4.0,<1.0.0" nornir-netmiko = ">=1,<2" nornir-utils = ">=0,<1" -pynautobot = ">=2.0.0rc2" +pynautobot = ">=2.0.2" requests = ">=2.25.1,<3.0.0" [package.extras] @@ -2345,13 +2390,13 @@ nornir = ">=3,<4" [[package]] name = "ntc-templates" -version = "4.2.0" +version = "4.3.0" description = "TextFSM Templates for Network Devices, and Python wrapper for TextFSM's CliTable." 
optional = false python-versions = ">=3.8,<4.0" files = [ - {file = "ntc_templates-4.2.0-py3-none-any.whl", hash = "sha256:f41471c1375c1a86bb5958358339efe9e95d908ea33866125adafe36fbfe11dd"}, - {file = "ntc_templates-4.2.0.tar.gz", hash = "sha256:a06c0e786aa3aea429d345ea67f59cb6da43557c31aa65914969d0cd6b0c0dde"}, + {file = "ntc_templates-4.3.0-py3-none-any.whl", hash = "sha256:f9b4805dfd9d1516a29ae9f505409c17c7f14c958d47f1c1f57c9486af6164db"}, + {file = "ntc_templates-4.3.0.tar.gz", hash = "sha256:b6902389e86b868d76b64ea55c8225a0aa7aafe910b3a02b2a33b7b18fb27ef1"}, ] [package.dependencies] @@ -2429,70 +2474,88 @@ files = [ [[package]] name = "pillow" -version = "10.0.1" +version = "10.2.0" description = "Python Imaging Library (Fork)" optional = false python-versions = ">=3.8" files = [ - {file = "Pillow-10.0.1-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:8f06be50669087250f319b706decf69ca71fdecd829091a37cc89398ca4dc17a"}, - {file = "Pillow-10.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:50bd5f1ebafe9362ad622072a1d2f5850ecfa44303531ff14353a4059113b12d"}, - {file = "Pillow-10.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e6a90167bcca1216606223a05e2cf991bb25b14695c518bc65639463d7db722d"}, - {file = "Pillow-10.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f11c9102c56ffb9ca87134bd025a43d2aba3f1155f508eff88f694b33a9c6d19"}, - {file = "Pillow-10.0.1-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:186f7e04248103482ea6354af6d5bcedb62941ee08f7f788a1c7707bc720c66f"}, - {file = "Pillow-10.0.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:0462b1496505a3462d0f35dc1c4d7b54069747d65d00ef48e736acda2c8cbdff"}, - {file = "Pillow-10.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d889b53ae2f030f756e61a7bff13684dcd77e9af8b10c6048fb2c559d6ed6eaf"}, - {file = "Pillow-10.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:552912dbca585b74d75279a7570dd29fa43b6d93594abb494ebb31ac19ace6bd"}, - {file = "Pillow-10.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:787bb0169d2385a798888e1122c980c6eff26bf941a8ea79747d35d8f9210ca0"}, - {file = "Pillow-10.0.1-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:fd2a5403a75b54661182b75ec6132437a181209b901446ee5724b589af8edef1"}, - {file = "Pillow-10.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2d7e91b4379f7a76b31c2dda84ab9e20c6220488e50f7822e59dac36b0cd92b1"}, - {file = "Pillow-10.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:19e9adb3f22d4c416e7cd79b01375b17159d6990003633ff1d8377e21b7f1b21"}, - {file = "Pillow-10.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93139acd8109edcdeffd85e3af8ae7d88b258b3a1e13a038f542b79b6d255c54"}, - {file = "Pillow-10.0.1-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:92a23b0431941a33242b1f0ce6c88a952e09feeea9af4e8be48236a68ffe2205"}, - {file = "Pillow-10.0.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:cbe68deb8580462ca0d9eb56a81912f59eb4542e1ef8f987405e35a0179f4ea2"}, - {file = "Pillow-10.0.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:522ff4ac3aaf839242c6f4e5b406634bfea002469656ae8358644fc6c4856a3b"}, - {file = "Pillow-10.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:84efb46e8d881bb06b35d1d541aa87f574b58e87f781cbba8d200daa835b42e1"}, - {file = "Pillow-10.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:898f1d306298ff40dc1b9ca24824f0488f6f039bc0e25cfb549d3195ffa17088"}, - {file = "Pillow-10.0.1-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:bcf1207e2f2385a576832af02702de104be71301c2696d0012b1b93fe34aaa5b"}, - {file = "Pillow-10.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5d6c9049c6274c1bb565021367431ad04481ebb54872edecfcd6088d27edd6ed"}, - {file = "Pillow-10.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:28444cb6ad49726127d6b340217f0627abc8732f1194fd5352dec5e6a0105635"}, - {file = "Pillow-10.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de596695a75496deb3b499c8c4f8e60376e0516e1a774e7bc046f0f48cd620ad"}, - {file = "Pillow-10.0.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:2872f2d7846cf39b3dbff64bc1104cc48c76145854256451d33c5faa55c04d1a"}, - {file = "Pillow-10.0.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:4ce90f8a24e1c15465048959f1e94309dfef93af272633e8f37361b824532e91"}, - {file = "Pillow-10.0.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ee7810cf7c83fa227ba9125de6084e5e8b08c59038a7b2c9045ef4dde61663b4"}, - {file = "Pillow-10.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b1be1c872b9b5fcc229adeadbeb51422a9633abd847c0ff87dc4ef9bb184ae08"}, - {file = "Pillow-10.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:98533fd7fa764e5f85eebe56c8e4094db912ccbe6fbf3a58778d543cadd0db08"}, - {file = "Pillow-10.0.1-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:764d2c0daf9c4d40ad12fbc0abd5da3af7f8aa11daf87e4fa1b834000f4b6b0a"}, - {file = "Pillow-10.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:fcb59711009b0168d6ee0bd8fb5eb259c4ab1717b2f538bbf36bacf207ef7a68"}, - {file = "Pillow-10.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:697a06bdcedd473b35e50a7e7506b1d8ceb832dc238a336bd6f4f5aa91a4b500"}, - {file = "Pillow-10.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f665d1e6474af9f9da5e86c2a3a2d2d6204e04d5af9c06b9d42afa6ebde3f21"}, - {file = "Pillow-10.0.1-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:2fa6dd2661838c66f1a5473f3b49ab610c98a128fc08afbe81b91a1f0bf8c51d"}, - {file = "Pillow-10.0.1-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:3a04359f308ebee571a3127fdb1bd01f88ba6f6fb6d087f8dd2e0d9bff43f2a7"}, - {file = "Pillow-10.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:723bd25051454cea9990203405fa6b74e043ea76d4968166dfd2569b0210886a"}, - {file = "Pillow-10.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:71671503e3015da1b50bd18951e2f9daf5b6ffe36d16f1eb2c45711a301521a7"}, - {file = "Pillow-10.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:44e7e4587392953e5e251190a964675f61e4dae88d1e6edbe9f36d6243547ff3"}, - {file = "Pillow-10.0.1-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:3855447d98cced8670aaa63683808df905e956f00348732448b5a6df67ee5849"}, - {file = "Pillow-10.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ed2d9c0704f2dc4fa980b99d565c0c9a543fe5101c25b3d60488b8ba80f0cce1"}, - {file = "Pillow-10.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f5bb289bb835f9fe1a1e9300d011eef4d69661bb9b34d5e196e5e82c4cb09b37"}, - {file = "Pillow-10.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a0d3e54ab1df9df51b914b2233cf779a5a10dfd1ce339d0421748232cea9876"}, - {file = "Pillow-10.0.1-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:2cc6b86ece42a11f16f55fe8903595eff2b25e0358dec635d0a701ac9586588f"}, - {file = "Pillow-10.0.1-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:ca26ba5767888c84bf5a0c1a32f069e8204ce8c21d00a49c90dabeba00ce0145"}, - {file = "Pillow-10.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f0b4b06da13275bc02adfeb82643c4a6385bd08d26f03068c2796f60d125f6f2"}, - {file = "Pillow-10.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bc2e3069569ea9dbe88d6b8ea38f439a6aad8f6e7a6283a38edf61ddefb3a9bf"}, - {file = "Pillow-10.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:8b451d6ead6e3500b6ce5c7916a43d8d8d25ad74b9102a629baccc0808c54971"}, - {file = "Pillow-10.0.1-pp310-pypy310_pp73-macosx_10_10_x86_64.whl", hash = "sha256:32bec7423cdf25c9038fef614a853c9d25c07590e1a870ed471f47fb80b244db"}, - {file = "Pillow-10.0.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:b7cf63d2c6928b51d35dfdbda6f2c1fddbe51a6bc4a9d4ee6ea0e11670dd981e"}, - {file = "Pillow-10.0.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f6d3d4c905e26354e8f9d82548475c46d8e0889538cb0657aa9c6f0872a37aa4"}, - {file = "Pillow-10.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:847e8d1017c741c735d3cd1883fa7b03ded4f825a6e5fcb9378fd813edee995f"}, - {file = "Pillow-10.0.1-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:7f771e7219ff04b79e231d099c0a28ed83aa82af91fd5fa9fdb28f5b8d5addaf"}, - {file = "Pillow-10.0.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:459307cacdd4138edee3875bbe22a2492519e060660eaf378ba3b405d1c66317"}, - {file = "Pillow-10.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b059ac2c4c7a97daafa7dc850b43b2d3667def858a4f112d1aa082e5c3d6cf7d"}, - {file = "Pillow-10.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:d6caf3cd38449ec3cd8a68b375e0c6fe4b6fd04edb6c9766b55ef84a6e8ddf2d"}, - {file = "Pillow-10.0.1.tar.gz", hash = "sha256:d72967b06be9300fed5cfbc8b5bafceec48bf7cdc7dab66b1d2549035287191d"}, + {file = "pillow-10.2.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:7823bdd049099efa16e4246bdf15e5a13dbb18a51b68fa06d6c1d4d8b99a796e"}, + {file = "pillow-10.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:83b2021f2ade7d1ed556bc50a399127d7fb245e725aa0113ebd05cfe88aaf588"}, + {file = "pillow-10.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6fad5ff2f13d69b7e74ce5b4ecd12cc0ec530fcee76356cac6742785ff71c452"}, + {file = "pillow-10.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da2b52b37dad6d9ec64e653637a096905b258d2fc2b984c41ae7d08b938a67e4"}, + {file = "pillow-10.2.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:47c0995fc4e7f79b5cfcab1fc437ff2890b770440f7696a3ba065ee0fd496563"}, + {file = "pillow-10.2.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = 
"sha256:322bdf3c9b556e9ffb18f93462e5f749d3444ce081290352c6070d014c93feb2"}, + {file = "pillow-10.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:51f1a1bffc50e2e9492e87d8e09a17c5eea8409cda8d3f277eb6edc82813c17c"}, + {file = "pillow-10.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:69ffdd6120a4737710a9eee73e1d2e37db89b620f702754b8f6e62594471dee0"}, + {file = "pillow-10.2.0-cp310-cp310-win32.whl", hash = "sha256:c6dafac9e0f2b3c78df97e79af707cdc5ef8e88208d686a4847bab8266870023"}, + {file = "pillow-10.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:aebb6044806f2e16ecc07b2a2637ee1ef67a11840a66752751714a0d924adf72"}, + {file = "pillow-10.2.0-cp310-cp310-win_arm64.whl", hash = "sha256:7049e301399273a0136ff39b84c3678e314f2158f50f517bc50285fb5ec847ad"}, + {file = "pillow-10.2.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:35bb52c37f256f662abdfa49d2dfa6ce5d93281d323a9af377a120e89a9eafb5"}, + {file = "pillow-10.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9c23f307202661071d94b5e384e1e1dc7dfb972a28a2310e4ee16103e66ddb67"}, + {file = "pillow-10.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:773efe0603db30c281521a7c0214cad7836c03b8ccff897beae9b47c0b657d61"}, + {file = "pillow-10.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11fa2e5984b949b0dd6d7a94d967743d87c577ff0b83392f17cb3990d0d2fd6e"}, + {file = "pillow-10.2.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:716d30ed977be8b37d3ef185fecb9e5a1d62d110dfbdcd1e2a122ab46fddb03f"}, + {file = "pillow-10.2.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:a086c2af425c5f62a65e12fbf385f7c9fcb8f107d0849dba5839461a129cf311"}, + {file = "pillow-10.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c8de2789052ed501dd829e9cae8d3dcce7acb4777ea4a479c14521c942d395b1"}, + {file = "pillow-10.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:609448742444d9290fd687940ac0b57fb35e6fd92bdb65386e08e99af60bf757"}, + 
{file = "pillow-10.2.0-cp311-cp311-win32.whl", hash = "sha256:823ef7a27cf86df6597fa0671066c1b596f69eba53efa3d1e1cb8b30f3533068"}, + {file = "pillow-10.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:1da3b2703afd040cf65ec97efea81cfba59cdbed9c11d8efc5ab09df9509fc56"}, + {file = "pillow-10.2.0-cp311-cp311-win_arm64.whl", hash = "sha256:edca80cbfb2b68d7b56930b84a0e45ae1694aeba0541f798e908a49d66b837f1"}, + {file = "pillow-10.2.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:1b5e1b74d1bd1b78bc3477528919414874748dd363e6272efd5abf7654e68bef"}, + {file = "pillow-10.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0eae2073305f451d8ecacb5474997c08569fb4eb4ac231ffa4ad7d342fdc25ac"}, + {file = "pillow-10.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7c2286c23cd350b80d2fc9d424fc797575fb16f854b831d16fd47ceec078f2c"}, + {file = "pillow-10.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e23412b5c41e58cec602f1135c57dfcf15482013ce6e5f093a86db69646a5aa"}, + {file = "pillow-10.2.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:52a50aa3fb3acb9cf7213573ef55d31d6eca37f5709c69e6858fe3bc04a5c2a2"}, + {file = "pillow-10.2.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:127cee571038f252a552760076407f9cff79761c3d436a12af6000cd182a9d04"}, + {file = "pillow-10.2.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:8d12251f02d69d8310b046e82572ed486685c38f02176bd08baf216746eb947f"}, + {file = "pillow-10.2.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:54f1852cd531aa981bc0965b7d609f5f6cc8ce8c41b1139f6ed6b3c54ab82bfb"}, + {file = "pillow-10.2.0-cp312-cp312-win32.whl", hash = "sha256:257d8788df5ca62c980314053197f4d46eefedf4e6175bc9412f14412ec4ea2f"}, + {file = "pillow-10.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:154e939c5f0053a383de4fd3d3da48d9427a7e985f58af8e94d0b3c9fcfcf4f9"}, + {file = "pillow-10.2.0-cp312-cp312-win_arm64.whl", hash = 
"sha256:f379abd2f1e3dddb2b61bc67977a6b5a0a3f7485538bcc6f39ec76163891ee48"}, + {file = "pillow-10.2.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:8373c6c251f7ef8bda6675dd6d2b3a0fcc31edf1201266b5cf608b62a37407f9"}, + {file = "pillow-10.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:870ea1ada0899fd0b79643990809323b389d4d1d46c192f97342eeb6ee0b8483"}, + {file = "pillow-10.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4b6b1e20608493548b1f32bce8cca185bf0480983890403d3b8753e44077129"}, + {file = "pillow-10.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3031709084b6e7852d00479fd1d310b07d0ba82765f973b543c8af5061cf990e"}, + {file = "pillow-10.2.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:3ff074fc97dd4e80543a3e91f69d58889baf2002b6be64347ea8cf5533188213"}, + {file = "pillow-10.2.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:cb4c38abeef13c61d6916f264d4845fab99d7b711be96c326b84df9e3e0ff62d"}, + {file = "pillow-10.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b1b3020d90c2d8e1dae29cf3ce54f8094f7938460fb5ce8bc5c01450b01fbaf6"}, + {file = "pillow-10.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:170aeb00224ab3dc54230c797f8404507240dd868cf52066f66a41b33169bdbe"}, + {file = "pillow-10.2.0-cp38-cp38-win32.whl", hash = "sha256:c4225f5220f46b2fde568c74fca27ae9771536c2e29d7c04f4fb62c83275ac4e"}, + {file = "pillow-10.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:0689b5a8c5288bc0504d9fcee48f61a6a586b9b98514d7d29b840143d6734f39"}, + {file = "pillow-10.2.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:b792a349405fbc0163190fde0dc7b3fef3c9268292586cf5645598b48e63dc67"}, + {file = "pillow-10.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c570f24be1e468e3f0ce7ef56a89a60f0e05b30a3669a459e419c6eac2c35364"}, + {file = "pillow-10.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:d8ecd059fdaf60c1963c58ceb8997b32e9dc1b911f5da5307aab614f1ce5c2fb"}, + {file = "pillow-10.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c365fd1703040de1ec284b176d6af5abe21b427cb3a5ff68e0759e1e313a5e7e"}, + {file = "pillow-10.2.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:70c61d4c475835a19b3a5aa42492409878bbca7438554a1f89d20d58a7c75c01"}, + {file = "pillow-10.2.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:b6f491cdf80ae540738859d9766783e3b3c8e5bd37f5dfa0b76abdecc5081f13"}, + {file = "pillow-10.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9d189550615b4948f45252d7f005e53c2040cea1af5b60d6f79491a6e147eef7"}, + {file = "pillow-10.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:49d9ba1ed0ef3e061088cd1e7538a0759aab559e2e0a80a36f9fd9d8c0c21591"}, + {file = "pillow-10.2.0-cp39-cp39-win32.whl", hash = "sha256:babf5acfede515f176833ed6028754cbcd0d206f7f614ea3447d67c33be12516"}, + {file = "pillow-10.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:0304004f8067386b477d20a518b50f3fa658a28d44e4116970abfcd94fac34a8"}, + {file = "pillow-10.2.0-cp39-cp39-win_arm64.whl", hash = "sha256:0fb3e7fc88a14eacd303e90481ad983fd5b69c761e9e6ef94c983f91025da869"}, + {file = "pillow-10.2.0-pp310-pypy310_pp73-macosx_10_10_x86_64.whl", hash = "sha256:322209c642aabdd6207517e9739c704dc9f9db943015535783239022002f054a"}, + {file = "pillow-10.2.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3eedd52442c0a5ff4f887fab0c1c0bb164d8635b32c894bc1faf4c618dd89df2"}, + {file = "pillow-10.2.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb28c753fd5eb3dd859b4ee95de66cc62af91bcff5db5f2571d32a520baf1f04"}, + {file = "pillow-10.2.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:33870dc4653c5017bf4c8873e5488d8f8d5f8935e2f1fb9a2208c47cdd66efd2"}, + {file = "pillow-10.2.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = 
"sha256:3c31822339516fb3c82d03f30e22b1d038da87ef27b6a78c9549888f8ceda39a"}, + {file = "pillow-10.2.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a2b56ba36e05f973d450582fb015594aaa78834fefe8dfb8fcd79b93e64ba4c6"}, + {file = "pillow-10.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:d8e6aeb9201e655354b3ad049cb77d19813ad4ece0df1249d3c793de3774f8c7"}, + {file = "pillow-10.2.0-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:2247178effb34a77c11c0e8ac355c7a741ceca0a732b27bf11e747bbc950722f"}, + {file = "pillow-10.2.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15587643b9e5eb26c48e49a7b33659790d28f190fc514a322d55da2fb5c2950e"}, + {file = "pillow-10.2.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753cd8f2086b2b80180d9b3010dd4ed147efc167c90d3bf593fe2af21265e5a5"}, + {file = "pillow-10.2.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:7c8f97e8e7a9009bcacbe3766a36175056c12f9a44e6e6f2d5caad06dcfbf03b"}, + {file = "pillow-10.2.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:d1b35bcd6c5543b9cb547dee3150c93008f8dd0f1fef78fc0cd2b141c5baf58a"}, + {file = "pillow-10.2.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:fe4c15f6c9285dc54ce6553a3ce908ed37c8f3825b5a51a15c91442bb955b868"}, + {file = "pillow-10.2.0.tar.gz", hash = "sha256:e87f0b2c78157e12d7686b27d63c070fd65d994e8ddae6f328e0dcf4a0cd007e"}, ] [package.extras] docs = ["furo", "olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-removed-in", "sphinxext-opengraph"] +fpx = ["olefile"] +mic = ["olefile"] tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"] +typing = ["typing-extensions"] +xmp = ["defusedxml"] [[package]] name = "pkgutil-resolve-name" @@ -2507,18 +2570,18 @@ files = [ [[package]] name = "platformdirs" -version = "4.1.0" +version = "4.2.0" description = "A small Python package 
for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." optional = false python-versions = ">=3.8" files = [ - {file = "platformdirs-4.1.0-py3-none-any.whl", hash = "sha256:11c8f37bcca40db96d8144522d925583bdb7a31f7b0e37e3ed4318400a8e2380"}, - {file = "platformdirs-4.1.0.tar.gz", hash = "sha256:906d548203468492d432bcb294d4bc2fff751bf84971fbb2c10918cc206ee420"}, + {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, + {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, ] [package.extras] -docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"] +docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] [[package]] name = "prometheus-client" @@ -2996,13 +3059,13 @@ six = ">=1.5" [[package]] name = "python-slugify" -version = "8.0.2" +version = "8.0.4" description = "A Python slugify application that also handles Unicode" optional = false python-versions = ">=3.7" files = [ - {file = "python-slugify-8.0.2.tar.gz", hash = "sha256:a1a02b127a95c124fd84f8f88be730e557fd823774bf19b1cd5e8704e2ae0e5e"}, - {file = "python_slugify-8.0.2-py2.py3-none-any.whl", hash = "sha256:428ea9b00c977b8f6c097724398f190b2c18e2a6011094d1001285875ccacdbf"}, + {file = "python-slugify-8.0.4.tar.gz", hash = "sha256:59202371d1d05b54a9e7720c5e038f928f45daaffe41dd10822f3907b937c856"}, + {file = "python_slugify-8.0.4-py2.py3-none-any.whl", hash = "sha256:276540b79961052b66b7d116620b36518847f52d5fd9e3a70164fc8c50faa6b8"}, ] [package.dependencies] @@ -3031,13 +3094,13 @@ postgresql = ["psycopg2"] 
[[package]] name = "pytz" -version = "2023.4" +version = "2024.1" description = "World timezone definitions, modern and historical" optional = false python-versions = "*" files = [ - {file = "pytz-2023.4-py2.py3-none-any.whl", hash = "sha256:f90ef520d95e7c46951105338d918664ebfd6f1d995bd7d153127ce90efafa6a"}, - {file = "pytz-2023.4.tar.gz", hash = "sha256:31d4583c4ed539cd037956140d695e42c033a19e984bfce9964a3f7d59bc2b40"}, + {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, + {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, ] [[package]] @@ -3360,121 +3423,121 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"] [[package]] name = "rpds-py" -version = "0.17.1" +version = "0.18.0" description = "Python bindings to Rust's persistent data structures (rpds)" optional = false python-versions = ">=3.8" files = [ - {file = "rpds_py-0.17.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:4128980a14ed805e1b91a7ed551250282a8ddf8201a4e9f8f5b7e6225f54170d"}, - {file = "rpds_py-0.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ff1dcb8e8bc2261a088821b2595ef031c91d499a0c1b031c152d43fe0a6ecec8"}, - {file = "rpds_py-0.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d65e6b4f1443048eb7e833c2accb4fa7ee67cc7d54f31b4f0555b474758bee55"}, - {file = "rpds_py-0.17.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a71169d505af63bb4d20d23a8fbd4c6ce272e7bce6cc31f617152aa784436f29"}, - {file = "rpds_py-0.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:436474f17733c7dca0fbf096d36ae65277e8645039df12a0fa52445ca494729d"}, - {file = "rpds_py-0.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:10162fe3f5f47c37ebf6d8ff5a2368508fe22007e3077bf25b9c7d803454d921"}, - {file = 
"rpds_py-0.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:720215373a280f78a1814becb1312d4e4d1077b1202a56d2b0815e95ccb99ce9"}, - {file = "rpds_py-0.17.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:70fcc6c2906cfa5c6a552ba7ae2ce64b6c32f437d8f3f8eea49925b278a61453"}, - {file = "rpds_py-0.17.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:91e5a8200e65aaac342a791272c564dffcf1281abd635d304d6c4e6b495f29dc"}, - {file = "rpds_py-0.17.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:99f567dae93e10be2daaa896e07513dd4bf9c2ecf0576e0533ac36ba3b1d5394"}, - {file = "rpds_py-0.17.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:24e4900a6643f87058a27320f81336d527ccfe503984528edde4bb660c8c8d59"}, - {file = "rpds_py-0.17.1-cp310-none-win32.whl", hash = "sha256:0bfb09bf41fe7c51413f563373e5f537eaa653d7adc4830399d4e9bdc199959d"}, - {file = "rpds_py-0.17.1-cp310-none-win_amd64.whl", hash = "sha256:20de7b7179e2031a04042e85dc463a93a82bc177eeba5ddd13ff746325558aa6"}, - {file = "rpds_py-0.17.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:65dcf105c1943cba45d19207ef51b8bc46d232a381e94dd38719d52d3980015b"}, - {file = "rpds_py-0.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:01f58a7306b64e0a4fe042047dd2b7d411ee82e54240284bab63e325762c1147"}, - {file = "rpds_py-0.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:071bc28c589b86bc6351a339114fb7a029f5cddbaca34103aa573eba7b482382"}, - {file = "rpds_py-0.17.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ae35e8e6801c5ab071b992cb2da958eee76340e6926ec693b5ff7d6381441745"}, - {file = "rpds_py-0.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:149c5cd24f729e3567b56e1795f74577aa3126c14c11e457bec1b1c90d212e38"}, - {file = "rpds_py-0.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:e796051f2070f47230c745d0a77a91088fbee2cc0502e9b796b9c6471983718c"}, - {file = "rpds_py-0.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:60e820ee1004327609b28db8307acc27f5f2e9a0b185b2064c5f23e815f248f8"}, - {file = "rpds_py-0.17.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1957a2ab607f9added64478a6982742eb29f109d89d065fa44e01691a20fc20a"}, - {file = "rpds_py-0.17.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8587fd64c2a91c33cdc39d0cebdaf30e79491cc029a37fcd458ba863f8815383"}, - {file = "rpds_py-0.17.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4dc889a9d8a34758d0fcc9ac86adb97bab3fb7f0c4d29794357eb147536483fd"}, - {file = "rpds_py-0.17.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:2953937f83820376b5979318840f3ee47477d94c17b940fe31d9458d79ae7eea"}, - {file = "rpds_py-0.17.1-cp311-none-win32.whl", hash = "sha256:1bfcad3109c1e5ba3cbe2f421614e70439f72897515a96c462ea657261b96518"}, - {file = "rpds_py-0.17.1-cp311-none-win_amd64.whl", hash = "sha256:99da0a4686ada4ed0f778120a0ea8d066de1a0a92ab0d13ae68492a437db78bf"}, - {file = "rpds_py-0.17.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:1dc29db3900cb1bb40353772417800f29c3d078dbc8024fd64655a04ee3c4bdf"}, - {file = "rpds_py-0.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:82ada4a8ed9e82e443fcef87e22a3eed3654dd3adf6e3b3a0deb70f03e86142a"}, - {file = "rpds_py-0.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d36b2b59e8cc6e576f8f7b671e32f2ff43153f0ad6d0201250a7c07f25d570e"}, - {file = "rpds_py-0.17.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3677fcca7fb728c86a78660c7fb1b07b69b281964673f486ae72860e13f512ad"}, - {file = "rpds_py-0.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:516fb8c77805159e97a689e2f1c80655c7658f5af601c34ffdb916605598cda2"}, - {file = 
"rpds_py-0.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:df3b6f45ba4515632c5064e35ca7f31d51d13d1479673185ba8f9fefbbed58b9"}, - {file = "rpds_py-0.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a967dd6afda7715d911c25a6ba1517975acd8d1092b2f326718725461a3d33f9"}, - {file = "rpds_py-0.17.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:dbbb95e6fc91ea3102505d111b327004d1c4ce98d56a4a02e82cd451f9f57140"}, - {file = "rpds_py-0.17.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:02866e060219514940342a1f84303a1ef7a1dad0ac311792fbbe19b521b489d2"}, - {file = "rpds_py-0.17.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:2528ff96d09f12e638695f3a2e0c609c7b84c6df7c5ae9bfeb9252b6fa686253"}, - {file = "rpds_py-0.17.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:bd345a13ce06e94c753dab52f8e71e5252aec1e4f8022d24d56decd31e1b9b23"}, - {file = "rpds_py-0.17.1-cp312-none-win32.whl", hash = "sha256:2a792b2e1d3038daa83fa474d559acfd6dc1e3650ee93b2662ddc17dbff20ad1"}, - {file = "rpds_py-0.17.1-cp312-none-win_amd64.whl", hash = "sha256:292f7344a3301802e7c25c53792fae7d1593cb0e50964e7bcdcc5cf533d634e3"}, - {file = "rpds_py-0.17.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:8ffe53e1d8ef2520ebcf0c9fec15bb721da59e8ef283b6ff3079613b1e30513d"}, - {file = "rpds_py-0.17.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4341bd7579611cf50e7b20bb8c2e23512a3dc79de987a1f411cb458ab670eb90"}, - {file = "rpds_py-0.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f4eb548daf4836e3b2c662033bfbfc551db58d30fd8fe660314f86bf8510b93"}, - {file = "rpds_py-0.17.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b686f25377f9c006acbac63f61614416a6317133ab7fafe5de5f7dc8a06d42eb"}, - {file = "rpds_py-0.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4e21b76075c01d65d0f0f34302b5a7457d95721d5e0667aea65e5bb3ab415c25"}, - 
{file = "rpds_py-0.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b86b21b348f7e5485fae740d845c65a880f5d1eda1e063bc59bef92d1f7d0c55"}, - {file = "rpds_py-0.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f175e95a197f6a4059b50757a3dca33b32b61691bdbd22c29e8a8d21d3914cae"}, - {file = "rpds_py-0.17.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1701fc54460ae2e5efc1dd6350eafd7a760f516df8dbe51d4a1c79d69472fbd4"}, - {file = "rpds_py-0.17.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:9051e3d2af8f55b42061603e29e744724cb5f65b128a491446cc029b3e2ea896"}, - {file = "rpds_py-0.17.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:7450dbd659fed6dd41d1a7d47ed767e893ba402af8ae664c157c255ec6067fde"}, - {file = "rpds_py-0.17.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:5a024fa96d541fd7edaa0e9d904601c6445e95a729a2900c5aec6555fe921ed6"}, - {file = "rpds_py-0.17.1-cp38-none-win32.whl", hash = "sha256:da1ead63368c04a9bded7904757dfcae01eba0e0f9bc41d3d7f57ebf1c04015a"}, - {file = "rpds_py-0.17.1-cp38-none-win_amd64.whl", hash = "sha256:841320e1841bb53fada91c9725e766bb25009cfd4144e92298db296fb6c894fb"}, - {file = "rpds_py-0.17.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:f6c43b6f97209e370124baf2bf40bb1e8edc25311a158867eb1c3a5d449ebc7a"}, - {file = "rpds_py-0.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5e7d63ec01fe7c76c2dbb7e972fece45acbb8836e72682bde138e7e039906e2c"}, - {file = "rpds_py-0.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81038ff87a4e04c22e1d81f947c6ac46f122e0c80460b9006e6517c4d842a6ec"}, - {file = "rpds_py-0.17.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:810685321f4a304b2b55577c915bece4c4a06dfe38f6e62d9cc1d6ca8ee86b99"}, - {file = "rpds_py-0.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:25f071737dae674ca8937a73d0f43f5a52e92c2d178330b4c0bb6ab05586ffa6"}, - {file 
= "rpds_py-0.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aa5bfb13f1e89151ade0eb812f7b0d7a4d643406caaad65ce1cbabe0a66d695f"}, - {file = "rpds_py-0.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dfe07308b311a8293a0d5ef4e61411c5c20f682db6b5e73de6c7c8824272c256"}, - {file = "rpds_py-0.17.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a000133a90eea274a6f28adc3084643263b1e7c1a5a66eb0a0a7a36aa757ed74"}, - {file = "rpds_py-0.17.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5d0e8a6434a3fbf77d11448c9c25b2f25244226cfbec1a5159947cac5b8c5fa4"}, - {file = "rpds_py-0.17.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:efa767c220d94aa4ac3a6dd3aeb986e9f229eaf5bce92d8b1b3018d06bed3772"}, - {file = "rpds_py-0.17.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:dbc56680ecf585a384fbd93cd42bc82668b77cb525343170a2d86dafaed2a84b"}, - {file = "rpds_py-0.17.1-cp39-none-win32.whl", hash = "sha256:270987bc22e7e5a962b1094953ae901395e8c1e1e83ad016c5cfcfff75a15a3f"}, - {file = "rpds_py-0.17.1-cp39-none-win_amd64.whl", hash = "sha256:2a7b2f2f56a16a6d62e55354dd329d929560442bd92e87397b7a9586a32e3e76"}, - {file = "rpds_py-0.17.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a3264e3e858de4fc601741498215835ff324ff2482fd4e4af61b46512dd7fc83"}, - {file = "rpds_py-0.17.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:f2f3b28b40fddcb6c1f1f6c88c6f3769cd933fa493ceb79da45968a21dccc920"}, - {file = "rpds_py-0.17.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9584f8f52010295a4a417221861df9bea4c72d9632562b6e59b3c7b87a1522b7"}, - {file = "rpds_py-0.17.1-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c64602e8be701c6cfe42064b71c84ce62ce66ddc6422c15463fd8127db3d8066"}, - {file = "rpds_py-0.17.1-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:060f412230d5f19fc8c8b75f315931b408d8ebf56aec33ef4168d1b9e54200b1"}, - {file = "rpds_py-0.17.1-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b9412abdf0ba70faa6e2ee6c0cc62a8defb772e78860cef419865917d86c7342"}, - {file = "rpds_py-0.17.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9737bdaa0ad33d34c0efc718741abaafce62fadae72c8b251df9b0c823c63b22"}, - {file = "rpds_py-0.17.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9f0e4dc0f17dcea4ab9d13ac5c666b6b5337042b4d8f27e01b70fae41dd65c57"}, - {file = "rpds_py-0.17.1-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:1db228102ab9d1ff4c64148c96320d0be7044fa28bd865a9ce628ce98da5973d"}, - {file = "rpds_py-0.17.1-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:d8bbd8e56f3ba25a7d0cf980fc42b34028848a53a0e36c9918550e0280b9d0b6"}, - {file = "rpds_py-0.17.1-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:be22ae34d68544df293152b7e50895ba70d2a833ad9566932d750d3625918b82"}, - {file = "rpds_py-0.17.1-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:bf046179d011e6114daf12a534d874958b039342b347348a78b7cdf0dd9d6041"}, - {file = "rpds_py-0.17.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:1a746a6d49665058a5896000e8d9d2f1a6acba8a03b389c1e4c06e11e0b7f40d"}, - {file = "rpds_py-0.17.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0b8bf5b8db49d8fd40f54772a1dcf262e8be0ad2ab0206b5a2ec109c176c0a4"}, - {file = "rpds_py-0.17.1-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f7f4cb1f173385e8a39c29510dd11a78bf44e360fb75610594973f5ea141028b"}, - {file = "rpds_py-0.17.1-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7fbd70cb8b54fe745301921b0816c08b6d917593429dfc437fd024b5ba713c58"}, - {file = "rpds_py-0.17.1-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:9bdf1303df671179eaf2cb41e8515a07fc78d9d00f111eadbe3e14262f59c3d0"}, - {file = "rpds_py-0.17.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fad059a4bd14c45776600d223ec194e77db6c20255578bb5bcdd7c18fd169361"}, - {file = "rpds_py-0.17.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3664d126d3388a887db44c2e293f87d500c4184ec43d5d14d2d2babdb4c64cad"}, - {file = "rpds_py-0.17.1-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:698ea95a60c8b16b58be9d854c9f993c639f5c214cf9ba782eca53a8789d6b19"}, - {file = "rpds_py-0.17.1-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:c3d2010656999b63e628a3c694f23020322b4178c450dc478558a2b6ef3cb9bb"}, - {file = "rpds_py-0.17.1-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:938eab7323a736533f015e6069a7d53ef2dcc841e4e533b782c2bfb9fb12d84b"}, - {file = "rpds_py-0.17.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:1e626b365293a2142a62b9a614e1f8e331b28f3ca57b9f05ebbf4cf2a0f0bdc5"}, - {file = "rpds_py-0.17.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:380e0df2e9d5d5d339803cfc6d183a5442ad7ab3c63c2a0982e8c824566c5ccc"}, - {file = "rpds_py-0.17.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b760a56e080a826c2e5af09002c1a037382ed21d03134eb6294812dda268c811"}, - {file = "rpds_py-0.17.1-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5576ee2f3a309d2bb403ec292d5958ce03953b0e57a11d224c1f134feaf8c40f"}, - {file = "rpds_py-0.17.1-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1f3c3461ebb4c4f1bbc70b15d20b565759f97a5aaf13af811fcefc892e9197ba"}, - {file = "rpds_py-0.17.1-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:637b802f3f069a64436d432117a7e58fab414b4e27a7e81049817ae94de45d8d"}, - {file = "rpds_py-0.17.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:ffee088ea9b593cc6160518ba9bd319b5475e5f3e578e4552d63818773c6f56a"}, - {file = "rpds_py-0.17.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3ac732390d529d8469b831949c78085b034bff67f584559340008d0f6041a049"}, - {file = "rpds_py-0.17.1-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:93432e747fb07fa567ad9cc7aaadd6e29710e515aabf939dfbed8046041346c6"}, - {file = "rpds_py-0.17.1-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:7b7d9ca34542099b4e185b3c2a2b2eda2e318a7dbde0b0d83357a6d4421b5296"}, - {file = "rpds_py-0.17.1-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:0387ce69ba06e43df54e43968090f3626e231e4bc9150e4c3246947567695f68"}, - {file = "rpds_py-0.17.1.tar.gz", hash = "sha256:0210b2668f24c078307260bf88bdac9d6f1093635df5123789bfee4d8d7fc8e7"}, + {file = "rpds_py-0.18.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:5b4e7d8d6c9b2e8ee2d55c90b59c707ca59bc30058269b3db7b1f8df5763557e"}, + {file = "rpds_py-0.18.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c463ed05f9dfb9baebef68048aed8dcdc94411e4bf3d33a39ba97e271624f8f7"}, + {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01e36a39af54a30f28b73096dd39b6802eddd04c90dbe161c1b8dbe22353189f"}, + {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d62dec4976954a23d7f91f2f4530852b0c7608116c257833922a896101336c51"}, + {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dd18772815d5f008fa03d2b9a681ae38d5ae9f0e599f7dda233c439fcaa00d40"}, + {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:923d39efa3cfb7279a0327e337a7958bff00cc447fd07a25cddb0a1cc9a6d2da"}, + {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:39514da80f971362f9267c600b6d459bfbbc549cffc2cef8e47474fddc9b45b1"}, + {file = 
"rpds_py-0.18.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a34d557a42aa28bd5c48a023c570219ba2593bcbbb8dc1b98d8cf5d529ab1434"}, + {file = "rpds_py-0.18.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:93df1de2f7f7239dc9cc5a4a12408ee1598725036bd2dedadc14d94525192fc3"}, + {file = "rpds_py-0.18.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:34b18ba135c687f4dac449aa5157d36e2cbb7c03cbea4ddbd88604e076aa836e"}, + {file = "rpds_py-0.18.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c0b5dcf9193625afd8ecc92312d6ed78781c46ecbf39af9ad4681fc9f464af88"}, + {file = "rpds_py-0.18.0-cp310-none-win32.whl", hash = "sha256:c4325ff0442a12113a6379af66978c3fe562f846763287ef66bdc1d57925d337"}, + {file = "rpds_py-0.18.0-cp310-none-win_amd64.whl", hash = "sha256:7223a2a5fe0d217e60a60cdae28d6949140dde9c3bcc714063c5b463065e3d66"}, + {file = "rpds_py-0.18.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:3a96e0c6a41dcdba3a0a581bbf6c44bb863f27c541547fb4b9711fd8cf0ffad4"}, + {file = "rpds_py-0.18.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30f43887bbae0d49113cbaab729a112251a940e9b274536613097ab8b4899cf6"}, + {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fcb25daa9219b4cf3a0ab24b0eb9a5cc8949ed4dc72acb8fa16b7e1681aa3c58"}, + {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d68c93e381010662ab873fea609bf6c0f428b6d0bb00f2c6939782e0818d37bf"}, + {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b34b7aa8b261c1dbf7720b5d6f01f38243e9b9daf7e6b8bc1fd4657000062f2c"}, + {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2e6d75ab12b0bbab7215e5d40f1e5b738aa539598db27ef83b2ec46747df90e1"}, + {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:0b8612cd233543a3781bc659c731b9d607de65890085098986dfd573fc2befe5"}, + {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:aec493917dd45e3c69d00a8874e7cbed844efd935595ef78a0f25f14312e33c6"}, + {file = "rpds_py-0.18.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:661d25cbffaf8cc42e971dd570d87cb29a665f49f4abe1f9e76be9a5182c4688"}, + {file = "rpds_py-0.18.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1df3659d26f539ac74fb3b0c481cdf9d725386e3552c6fa2974f4d33d78e544b"}, + {file = "rpds_py-0.18.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a1ce3ba137ed54f83e56fb983a5859a27d43a40188ba798993812fed73c70836"}, + {file = "rpds_py-0.18.0-cp311-none-win32.whl", hash = "sha256:69e64831e22a6b377772e7fb337533c365085b31619005802a79242fee620bc1"}, + {file = "rpds_py-0.18.0-cp311-none-win_amd64.whl", hash = "sha256:998e33ad22dc7ec7e030b3df701c43630b5bc0d8fbc2267653577e3fec279afa"}, + {file = "rpds_py-0.18.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:7f2facbd386dd60cbbf1a794181e6aa0bd429bd78bfdf775436020172e2a23f0"}, + {file = "rpds_py-0.18.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1d9a5be316c15ffb2b3c405c4ff14448c36b4435be062a7f578ccd8b01f0c4d8"}, + {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cd5bf1af8efe569654bbef5a3e0a56eca45f87cfcffab31dd8dde70da5982475"}, + {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5417558f6887e9b6b65b4527232553c139b57ec42c64570569b155262ac0754f"}, + {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:56a737287efecafc16f6d067c2ea0117abadcd078d58721f967952db329a3e5c"}, + {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8f03bccbd8586e9dd37219bce4d4e0d3ab492e6b3b533e973fa08a112cb2ffc9"}, + {file = 
"rpds_py-0.18.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4457a94da0d5c53dc4b3e4de1158bdab077db23c53232f37a3cb7afdb053a4e3"}, + {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0ab39c1ba9023914297dd88ec3b3b3c3f33671baeb6acf82ad7ce883f6e8e157"}, + {file = "rpds_py-0.18.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9d54553c1136b50fd12cc17e5b11ad07374c316df307e4cfd6441bea5fb68496"}, + {file = "rpds_py-0.18.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0af039631b6de0397ab2ba16eaf2872e9f8fca391b44d3d8cac317860a700a3f"}, + {file = "rpds_py-0.18.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:84ffab12db93b5f6bad84c712c92060a2d321b35c3c9960b43d08d0f639d60d7"}, + {file = "rpds_py-0.18.0-cp312-none-win32.whl", hash = "sha256:685537e07897f173abcf67258bee3c05c374fa6fff89d4c7e42fb391b0605e98"}, + {file = "rpds_py-0.18.0-cp312-none-win_amd64.whl", hash = "sha256:e003b002ec72c8d5a3e3da2989c7d6065b47d9eaa70cd8808b5384fbb970f4ec"}, + {file = "rpds_py-0.18.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:08f9ad53c3f31dfb4baa00da22f1e862900f45908383c062c27628754af2e88e"}, + {file = "rpds_py-0.18.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c0013fe6b46aa496a6749c77e00a3eb07952832ad6166bd481c74bda0dcb6d58"}, + {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e32a92116d4f2a80b629778280103d2a510a5b3f6314ceccd6e38006b5e92dcb"}, + {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e541ec6f2ec456934fd279a3120f856cd0aedd209fc3852eca563f81738f6861"}, + {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bed88b9a458e354014d662d47e7a5baafd7ff81c780fd91584a10d6ec842cb73"}, + {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2644e47de560eb7bd55c20fc59f6daa04682655c58d08185a9b95c1970fa1e07"}, + 
{file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e8916ae4c720529e18afa0b879473049e95949bf97042e938530e072fde061d"}, + {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:465a3eb5659338cf2a9243e50ad9b2296fa15061736d6e26240e713522b6235c"}, + {file = "rpds_py-0.18.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:ea7d4a99f3b38c37eac212dbd6ec42b7a5ec51e2c74b5d3223e43c811609e65f"}, + {file = "rpds_py-0.18.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:67071a6171e92b6da534b8ae326505f7c18022c6f19072a81dcf40db2638767c"}, + {file = "rpds_py-0.18.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:41ef53e7c58aa4ef281da975f62c258950f54b76ec8e45941e93a3d1d8580594"}, + {file = "rpds_py-0.18.0-cp38-none-win32.whl", hash = "sha256:fdea4952db2793c4ad0bdccd27c1d8fdd1423a92f04598bc39425bcc2b8ee46e"}, + {file = "rpds_py-0.18.0-cp38-none-win_amd64.whl", hash = "sha256:7cd863afe7336c62ec78d7d1349a2f34c007a3cc6c2369d667c65aeec412a5b1"}, + {file = "rpds_py-0.18.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:5307def11a35f5ae4581a0b658b0af8178c65c530e94893345bebf41cc139d33"}, + {file = "rpds_py-0.18.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:77f195baa60a54ef9d2de16fbbfd3ff8b04edc0c0140a761b56c267ac11aa467"}, + {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:39f5441553f1c2aed4de4377178ad8ff8f9d733723d6c66d983d75341de265ab"}, + {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9a00312dea9310d4cb7dbd7787e722d2e86a95c2db92fbd7d0155f97127bcb40"}, + {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8f2fc11e8fe034ee3c34d316d0ad8808f45bc3b9ce5857ff29d513f3ff2923a1"}, + {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:586f8204935b9ec884500498ccc91aa869fc652c40c093bd9e1471fbcc25c022"}, + {file 
= "rpds_py-0.18.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ddc2f4dfd396c7bfa18e6ce371cba60e4cf9d2e5cdb71376aa2da264605b60b9"}, + {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5ddcba87675b6d509139d1b521e0c8250e967e63b5909a7e8f8944d0f90ff36f"}, + {file = "rpds_py-0.18.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:7bd339195d84439cbe5771546fe8a4e8a7a045417d8f9de9a368c434e42a721e"}, + {file = "rpds_py-0.18.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:d7c36232a90d4755b720fbd76739d8891732b18cf240a9c645d75f00639a9024"}, + {file = "rpds_py-0.18.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:6b0817e34942b2ca527b0e9298373e7cc75f429e8da2055607f4931fded23e20"}, + {file = "rpds_py-0.18.0-cp39-none-win32.whl", hash = "sha256:99f70b740dc04d09e6b2699b675874367885217a2e9f782bdf5395632ac663b7"}, + {file = "rpds_py-0.18.0-cp39-none-win_amd64.whl", hash = "sha256:6ef687afab047554a2d366e112dd187b62d261d49eb79b77e386f94644363294"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ad36cfb355e24f1bd37cac88c112cd7730873f20fb0bdaf8ba59eedf8216079f"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:36b3ee798c58ace201289024b52788161e1ea133e4ac93fba7d49da5fec0ef9e"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8a2f084546cc59ea99fda8e070be2fd140c3092dc11524a71aa8f0f3d5a55ca"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e4461d0f003a0aa9be2bdd1b798a041f177189c1a0f7619fe8c95ad08d9a45d7"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8db715ebe3bb7d86d77ac1826f7d67ec11a70dbd2376b7cc214199360517b641"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:793968759cd0d96cac1e367afd70c235867831983f876a53389ad869b043c948"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66e6a3af5a75363d2c9a48b07cb27c4ea542938b1a2e93b15a503cdfa8490795"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6ef0befbb5d79cf32d0266f5cff01545602344eda89480e1dd88aca964260b18"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:1d4acf42190d449d5e89654d5c1ed3a4f17925eec71f05e2a41414689cda02d1"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:a5f446dd5055667aabaee78487f2b5ab72e244f9bc0b2ffebfeec79051679984"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:9dbbeb27f4e70bfd9eec1be5477517365afe05a9b2c441a0b21929ee61048124"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:22806714311a69fd0af9b35b7be97c18a0fc2826e6827dbb3a8c94eac6cf7eeb"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:b34ae4636dfc4e76a438ab826a0d1eed2589ca7d9a1b2d5bb546978ac6485461"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c8370641f1a7f0e0669ddccca22f1da893cef7628396431eb445d46d893e5cd"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c8362467a0fdeccd47935f22c256bec5e6abe543bf0d66e3d3d57a8fb5731863"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:11a8c85ef4a07a7638180bf04fe189d12757c696eb41f310d2426895356dcf05"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b316144e85316da2723f9d8dc75bada12fa58489a527091fa1d5a612643d1a0e"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:cf1ea2e34868f6fbf070e1af291c8180480310173de0b0c43fc38a02929fc0e3"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e546e768d08ad55b20b11dbb78a745151acbd938f8f00d0cfbabe8b0199b9880"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:4901165d170a5fde6f589acb90a6b33629ad1ec976d4529e769c6f3d885e3e80"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:618a3d6cae6ef8ec88bb76dd80b83cfe415ad4f1d942ca2a903bf6b6ff97a2da"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:ed4eb745efbff0a8e9587d22a84be94a5eb7d2d99c02dacf7bd0911713ed14dd"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:6c81e5f372cd0dc5dc4809553d34f832f60a46034a5f187756d9b90586c2c307"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:43fbac5f22e25bee1d482c97474f930a353542855f05c1161fd804c9dc74a09d"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6d7faa6f14017c0b1e69f5e2c357b998731ea75a442ab3841c0dbbbfe902d2c4"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:08231ac30a842bd04daabc4d71fddd7e6d26189406d5a69535638e4dcb88fe76"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:044a3e61a7c2dafacae99d1e722cc2d4c05280790ec5a05031b3876809d89a5c"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3f26b5bd1079acdb0c7a5645e350fe54d16b17bfc5e71f371c449383d3342e17"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:482103aed1dfe2f3b71a58eff35ba105289b8d862551ea576bd15479aba01f66"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:1374f4129f9bcca53a1bba0bb86bf78325a0374577cf7e9e4cd046b1e6f20e24"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:635dc434ff724b178cb192c70016cc0ad25a275228f749ee0daf0eddbc8183b1"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:bc362ee4e314870a70f4ae88772d72d877246537d9f8cb8f7eacf10884862432"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:4832d7d380477521a8c1644bbab6588dfedea5e30a7d967b5fb75977c45fd77f"}, + {file = "rpds_py-0.18.0.tar.gz", hash = "sha256:42821446ee7a76f5d9f71f9e33a4fb2ffd724bb3e7f93386150b61a43115788d"}, ] [[package]] name = "ruamel-yaml" -version = "0.18.5" +version = "0.18.6" description = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order" optional = false python-versions = ">=3.7" files = [ - {file = "ruamel.yaml-0.18.5-py3-none-any.whl", hash = "sha256:a013ac02f99a69cdd6277d9664689eb1acba07069f912823177c5eced21a6ada"}, - {file = "ruamel.yaml-0.18.5.tar.gz", hash = "sha256:61917e3a35a569c1133a8f772e1226961bf5a1198bea7e23f06a0841dea1ab0e"}, + {file = "ruamel.yaml-0.18.6-py3-none-any.whl", hash = "sha256:57b53ba33def16c4f3d807c0ccbc00f8a6081827e81ba2491691b76882d0c636"}, + {file = "ruamel.yaml-0.18.6.tar.gz", hash = "sha256:8b27e6a217e786c6fbe5634d8f3f11bc63e0f80f6a5890f28863d9c45aac311b"}, ] [package.dependencies] @@ -3569,18 +3632,18 @@ paramiko = "*" [[package]] name = "setuptools" -version = "69.0.3" +version = "69.1.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.0.3-py3-none-any.whl", hash = "sha256:385eb4edd9c9d5c17540511303e39a147ce2fc04bc55289c322b9e5904fe2c05"}, - {file = "setuptools-69.0.3.tar.gz", hash = "sha256:be1af57fc409f93647f2e8e4573a142ed38724b8cdd389706a867bb4efcf1e78"}, + {file = 
"setuptools-69.1.0-py3-none-any.whl", hash = "sha256:c054629b81b946d63a9c6e732bc8b2513a7c3ea645f11d0139a2191d735c60c6"}, + {file = "setuptools-69.1.0.tar.gz", hash = "sha256:850894c4195f09c4ed30dba56213bf7c3f21d86ed6bdaafb5df5972593bfc401"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] @@ -3659,13 +3722,13 @@ social-auth-core = ">=4.4.1" [[package]] name = "social-auth-core" -version = "4.5.2" +version = "4.5.3" description = "Python social authentication made simple." 
optional = false python-versions = ">=3.8" files = [ - {file = "social-auth-core-4.5.2.tar.gz", hash = "sha256:e313bfd09ad78a4af44c5630f3770776b24f468e9a5b71160ade9583efa43f8a"}, - {file = "social_auth_core-4.5.2-py3-none-any.whl", hash = "sha256:47b48be9b6da59aed4792d805cc25f4c7b7f57e0bbf86d659b5df0ff3f253109"}, + {file = "social-auth-core-4.5.3.tar.gz", hash = "sha256:9d9b51b7ce2ccd0b7139e6b7f52a32cb922726de819fb13babe35f12ae89852a"}, + {file = "social_auth_core-4.5.3-py3-none-any.whl", hash = "sha256:8d16e66eb97bb7be43a023d6efa16628cdc94cefd8d8053930c98a0f676867e7"}, ] [package.dependencies] @@ -3863,13 +3926,13 @@ files = [ [[package]] name = "tzdata" -version = "2023.4" +version = "2024.1" description = "Provider of IANA time zone data" optional = false python-versions = ">=2" files = [ - {file = "tzdata-2023.4-py2.py3-none-any.whl", hash = "sha256:aa3ace4329eeacda5b7beb7ea08ece826c28d761cda36e747cfbf97996d39bf3"}, - {file = "tzdata-2023.4.tar.gz", hash = "sha256:dd54c94f294765522c77399649b4fefd95522479a664a0cec87f41bebc6148c9"}, + {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, + {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, ] [[package]] @@ -3912,38 +3975,40 @@ files = [ [[package]] name = "watchdog" -version = "3.0.0" +version = "4.0.0" description = "Filesystem events monitoring" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "watchdog-3.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:336adfc6f5cc4e037d52db31194f7581ff744b67382eb6021c868322e32eef41"}, - {file = "watchdog-3.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a70a8dcde91be523c35b2bf96196edc5730edb347e374c7de7cd20c43ed95397"}, - {file = "watchdog-3.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:adfdeab2da79ea2f76f87eb42a3ab1966a5313e5a69a0213a3cc06ef692b0e96"}, - {file = 
"watchdog-3.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2b57a1e730af3156d13b7fdddfc23dea6487fceca29fc75c5a868beed29177ae"}, - {file = "watchdog-3.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7ade88d0d778b1b222adebcc0927428f883db07017618a5e684fd03b83342bd9"}, - {file = "watchdog-3.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7e447d172af52ad204d19982739aa2346245cc5ba6f579d16dac4bfec226d2e7"}, - {file = "watchdog-3.0.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:9fac43a7466eb73e64a9940ac9ed6369baa39b3bf221ae23493a9ec4d0022674"}, - {file = "watchdog-3.0.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8ae9cda41fa114e28faf86cb137d751a17ffd0316d1c34ccf2235e8a84365c7f"}, - {file = "watchdog-3.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:25f70b4aa53bd743729c7475d7ec41093a580528b100e9a8c5b5efe8899592fc"}, - {file = "watchdog-3.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4f94069eb16657d2c6faada4624c39464f65c05606af50bb7902e036e3219be3"}, - {file = "watchdog-3.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7c5f84b5194c24dd573fa6472685b2a27cc5a17fe5f7b6fd40345378ca6812e3"}, - {file = "watchdog-3.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3aa7f6a12e831ddfe78cdd4f8996af9cf334fd6346531b16cec61c3b3c0d8da0"}, - {file = "watchdog-3.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:233b5817932685d39a7896b1090353fc8efc1ef99c9c054e46c8002561252fb8"}, - {file = "watchdog-3.0.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:13bbbb462ee42ec3c5723e1205be8ced776f05b100e4737518c67c8325cf6100"}, - {file = "watchdog-3.0.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:8f3ceecd20d71067c7fd4c9e832d4e22584318983cabc013dbf3f70ea95de346"}, - {file = "watchdog-3.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:c9d8c8ec7efb887333cf71e328e39cffbf771d8f8f95d308ea4125bf5f90ba64"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_aarch64.whl", hash = 
"sha256:0e06ab8858a76e1219e68c7573dfeba9dd1c0219476c5a44d5333b01d7e1743a"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:d00e6be486affb5781468457b21a6cbe848c33ef43f9ea4a73b4882e5f188a44"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:c07253088265c363d1ddf4b3cdb808d59a0468ecd017770ed716991620b8f77a"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:5113334cf8cf0ac8cd45e1f8309a603291b614191c9add34d33075727a967709"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:51f90f73b4697bac9c9a78394c3acbbd331ccd3655c11be1a15ae6fe289a8c83"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:ba07e92756c97e3aca0912b5cbc4e5ad802f4557212788e72a72a47ff376950d"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:d429c2430c93b7903914e4db9a966c7f2b068dd2ebdd2fa9b9ce094c7d459f33"}, - {file = "watchdog-3.0.0-py3-none-win32.whl", hash = "sha256:3ed7c71a9dccfe838c2f0b6314ed0d9b22e77d268c67e015450a29036a81f60f"}, - {file = "watchdog-3.0.0-py3-none-win_amd64.whl", hash = "sha256:4c9956d27be0bb08fc5f30d9d0179a855436e655f046d288e2bcc11adfae893c"}, - {file = "watchdog-3.0.0-py3-none-win_ia64.whl", hash = "sha256:5d9f3a10e02d7371cd929b5d8f11e87d4bad890212ed3901f9b4d68767bee759"}, - {file = "watchdog-3.0.0.tar.gz", hash = "sha256:4d98a320595da7a7c5a18fc48cb633c2e73cda78f93cac2ef42d42bf609a33f9"}, + {file = "watchdog-4.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:39cb34b1f1afbf23e9562501673e7146777efe95da24fab5707b88f7fb11649b"}, + {file = "watchdog-4.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c522392acc5e962bcac3b22b9592493ffd06d1fc5d755954e6be9f4990de932b"}, + {file = "watchdog-4.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6c47bdd680009b11c9ac382163e05ca43baf4127954c5f6d0250e7d772d2b80c"}, + {file = "watchdog-4.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = 
"sha256:8350d4055505412a426b6ad8c521bc7d367d1637a762c70fdd93a3a0d595990b"}, + {file = "watchdog-4.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c17d98799f32e3f55f181f19dd2021d762eb38fdd381b4a748b9f5a36738e935"}, + {file = "watchdog-4.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4986db5e8880b0e6b7cd52ba36255d4793bf5cdc95bd6264806c233173b1ec0b"}, + {file = "watchdog-4.0.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:11e12fafb13372e18ca1bbf12d50f593e7280646687463dd47730fd4f4d5d257"}, + {file = "watchdog-4.0.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5369136a6474678e02426bd984466343924d1df8e2fd94a9b443cb7e3aa20d19"}, + {file = "watchdog-4.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:76ad8484379695f3fe46228962017a7e1337e9acadafed67eb20aabb175df98b"}, + {file = "watchdog-4.0.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:45cc09cc4c3b43fb10b59ef4d07318d9a3ecdbff03abd2e36e77b6dd9f9a5c85"}, + {file = "watchdog-4.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:eed82cdf79cd7f0232e2fdc1ad05b06a5e102a43e331f7d041e5f0e0a34a51c4"}, + {file = "watchdog-4.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ba30a896166f0fee83183cec913298151b73164160d965af2e93a20bbd2ab605"}, + {file = "watchdog-4.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d18d7f18a47de6863cd480734613502904611730f8def45fc52a5d97503e5101"}, + {file = "watchdog-4.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2895bf0518361a9728773083908801a376743bcc37dfa252b801af8fd281b1ca"}, + {file = "watchdog-4.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:87e9df830022488e235dd601478c15ad73a0389628588ba0b028cb74eb72fed8"}, + {file = "watchdog-4.0.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6e949a8a94186bced05b6508faa61b7adacc911115664ccb1923b9ad1f1ccf7b"}, + {file = "watchdog-4.0.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6a4db54edea37d1058b08947c789a2354ee02972ed5d1e0dca9b0b820f4c7f92"}, + {file = 
"watchdog-4.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d31481ccf4694a8416b681544c23bd271f5a123162ab603c7d7d2dd7dd901a07"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:8fec441f5adcf81dd240a5fe78e3d83767999771630b5ddfc5867827a34fa3d3"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:6a9c71a0b02985b4b0b6d14b875a6c86ddea2fdbebd0c9a720a806a8bbffc69f"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:557ba04c816d23ce98a06e70af6abaa0485f6d94994ec78a42b05d1c03dcbd50"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:d0f9bd1fd919134d459d8abf954f63886745f4660ef66480b9d753a7c9d40927"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:f9b2fdca47dc855516b2d66eef3c39f2672cbf7e7a42e7e67ad2cbfcd6ba107d"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:73c7a935e62033bd5e8f0da33a4dcb763da2361921a69a5a95aaf6c93aa03a87"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:6a80d5cae8c265842c7419c560b9961561556c4361b297b4c431903f8c33b269"}, + {file = "watchdog-4.0.0-py3-none-win32.whl", hash = "sha256:8f9a542c979df62098ae9c58b19e03ad3df1c9d8c6895d96c0d51da17b243b1c"}, + {file = "watchdog-4.0.0-py3-none-win_amd64.whl", hash = "sha256:f970663fa4f7e80401a7b0cbeec00fa801bf0287d93d48368fc3e6fa32716245"}, + {file = "watchdog-4.0.0-py3-none-win_ia64.whl", hash = "sha256:9a03e16e55465177d416699331b0f3564138f1807ecc5f2de9d55d8f188d08c7"}, + {file = "watchdog-4.0.0.tar.gz", hash = "sha256:e3e7065cbdabe6183ab82199d7a4f6b3ba0a438c5a512a68559846ccb76a78ec"}, ] [package.extras] @@ -4041,13 +4106,13 @@ files = [ [[package]] name = "yamllint" -version = "1.33.0" +version = "1.34.0" description = "A linter for YAML files." 
optional = false python-versions = ">=3.8" files = [ - {file = "yamllint-1.33.0-py3-none-any.whl", hash = "sha256:28a19f5d68d28d8fec538a1db21bb2d84c7dc2e2ea36266da8d4d1c5a683814d"}, - {file = "yamllint-1.33.0.tar.gz", hash = "sha256:2dceab9ef2d99518a2fcf4ffc964d44250ac4459be1ba3ca315118e4a1a81f7d"}, + {file = "yamllint-1.34.0-py3-none-any.whl", hash = "sha256:33b813f6ff2ffad2e57a288281098392b85f7463ce1f3d5cd45aa848b916a806"}, + {file = "yamllint-1.34.0.tar.gz", hash = "sha256:7f0a6a41e8aab3904878da4ae34b6248b6bc74634e0d3a90f0fb2d7e723a3d4f"}, ] [package.dependencies] diff --git a/pyproject.toml b/pyproject.toml index 1686367b..4d6dc55a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "nautobot-device-onboarding" -version = "3.0.2a0" +version = "3.0.2a1" description = "A app for Nautobot to easily onboard new devices." authors = ["Network to Code, LLC "] license = "Apache-2.0" @@ -94,10 +94,20 @@ no-docstring-rgx="^(_|test_|Meta$)" # Line length is enforced by Black, so pylint doesn't need to check it. # Pylint and Black disagree about how to format multi-line arrays; Black wins. 
disable = """, + arguments-differ, line-too-long, duplicate-code, - too-many-lines, + logging-fstring-interpolation, + too-few-public-methods, too-many-ancestors, + too-many-arguments, + too-many-branches, + too-many-lines, + too-many-locals, + too-many-public-methods, + too-many-statements, + unused-argument, + unused-import, """ [tool.pylint.miscellaneous] From 7bf81742a8d22f005d15d3d506623f09cb80d58a Mon Sep 17 00:00:00 2001 From: David Cates Date: Wed, 14 Feb 2024 17:34:45 -0700 Subject: [PATCH 056/225] black --- .../diffsync/adapters/network_importer_adapters.py | 1 + .../diffsync/models/network_importer_models.py | 1 + nautobot_device_onboarding/jobs.py | 4 +++- 3 files changed, 5 insertions(+), 1 deletion(-) diff --git a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py index da88a18c..817ef323 100644 --- a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py @@ -142,6 +142,7 @@ def load(self): class MacUnixExpandedUppercase(mac_unix_expanded): """Mac Unix Expanded Uppercase.""" + word_fmt = "%.2X" diff --git a/nautobot_device_onboarding/diffsync/models/network_importer_models.py b/nautobot_device_onboarding/diffsync/models/network_importer_models.py index 76c2f0ce..5e8d27ba 100644 --- a/nautobot_device_onboarding/diffsync/models/network_importer_models.py +++ b/nautobot_device_onboarding/diffsync/models/network_importer_models.py @@ -293,6 +293,7 @@ def update(self, attrs): class NetworkImporterLagToInterface(DiffSyncModel): """Shared data model representing a LagToInterface.""" + _modelname = "lag_to_interface" _identifiers = ("device__name", "name") _attributes = ("lag__interface__name",) diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py index c9107af0..64de130c 100644 --- a/nautobot_device_onboarding/jobs.py +++ 
b/nautobot_device_onboarding/jobs.py @@ -16,6 +16,7 @@ from nautobot_ssot.jobs.base import DataSource from nornir import InitNornir from nornir.core.plugins.inventory import InventoryPluginRegister + # from nornir.core.task import Result, Task # from nornir_nautobot.exceptions import NornirNautobotException from nornir_netmiko import netmiko_send_command @@ -554,6 +555,7 @@ def run(self, *args, **kwargs): class CommandGetterNetworkImporter(Job): """Simple Job to Execute Show Command.""" + debug = BooleanVar(description="Enable for more verbose logging.") namespace = ObjectVar( model=Namespace, required=True, description="The namespace for all IP addresses created or updated in the sync." @@ -615,7 +617,7 @@ def run(self, *args, **kwargs): # all_results = format_ni_data_cisco_ios(command=command,command_result=command_result) for host_name, result in command_result.items(): if command_result.failed: - failed_results = {host_name : { "Failed": True, "subtask_result": result.result }} + failed_results = {host_name: {"Failed": True, "subtask_result": result.result}} return failed_results if host_name not in all_results: all_results[host_name] = {"interfaces": {}, "serial": ""} From f96216ac669ccccbe365bbb7a223af3a1608146d Mon Sep 17 00:00:00 2001 From: David Cates Date: Wed, 14 Feb 2024 17:44:38 -0700 Subject: [PATCH 057/225] simplify if statment --- nautobot_device_onboarding/jobs.py | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py index 64de130c..d260719d 100644 --- a/nautobot_device_onboarding/jobs.py +++ b/nautobot_device_onboarding/jobs.py @@ -642,10 +642,7 @@ def run(self, *args, **kwargs): ip_address = interface_info.get("ip_address") mask_length = interface_info.get("prefix_length") - if link_status == "up": - link_status = True - else: - link_status = False + link_status = bool(link_status == "up") interface_type = normalize_interface_type(hardware_type) @@ -696,8 
+693,8 @@ def run(self, *args, **kwargs): all_results[host_name]["interfaces"][interface_name][ "tagged_vlans" ] = tagged_vlans_info - else: - self.logger.info(f"Interface {interface_name} not found in interfaces list.") + else: + self.logger.info(f"Interface {interface_name} not found in interfaces list.") except Exception as err: # pylint: disable=broad-exception-caught self.logger.info("Error: %s", err) From 6a2fa1380ab687df311df20a69be85a618b0681a Mon Sep 17 00:00:00 2001 From: David Cates Date: Wed, 14 Feb 2024 17:47:08 -0700 Subject: [PATCH 058/225] formatting --- nautobot_device_onboarding/jobs.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py index d260719d..531ee437 100644 --- a/nautobot_device_onboarding/jobs.py +++ b/nautobot_device_onboarding/jobs.py @@ -642,7 +642,7 @@ def run(self, *args, **kwargs): ip_address = interface_info.get("ip_address") mask_length = interface_info.get("prefix_length") - link_status = bool(link_status == "up") + link_status = bool(link_status == "up") interface_type = normalize_interface_type(hardware_type) From 615f0773e84c074dd6f69f03fe5a4dbe667c5c5a Mon Sep 17 00:00:00 2001 From: Jeff Kala Date: Wed, 14 Feb 2024 21:58:31 -0600 Subject: [PATCH 059/225] updates --- nautobot_device_onboarding/jobs.py | 28 +- .../nornir_plays/processor.py | 86 +++++- nautobot_device_onboarding/utils/formatter.py | 61 ++++ poetry.lock | 265 +++++++++++++++++- pyproject.toml | 1 + 5 files changed, 420 insertions(+), 21 deletions(-) mode change 100644 => 100755 nautobot_device_onboarding/jobs.py mode change 100644 => 100755 nautobot_device_onboarding/utils/formatter.py mode change 100644 => 100755 poetry.lock mode change 100644 => 100755 pyproject.toml diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py old mode 100644 new mode 100755 index 531ee437..80424bc2 --- a/nautobot_device_onboarding/jobs.py +++ 
b/nautobot_device_onboarding/jobs.py @@ -11,17 +11,9 @@ from nautobot.extras.choices import SecretsGroupAccessTypeChoices, SecretsGroupSecretTypeChoices from nautobot.extras.models import Role, SecretsGroup, SecretsGroupAssociation, Status, Tag from nautobot.ipam.models import Namespace -from nautobot_plugin_nornir.constants import NORNIR_SETTINGS -from nautobot_plugin_nornir.plugins.inventory.nautobot_orm import NautobotORMInventory -from nautobot_ssot.jobs.base import DataSource -from nornir import InitNornir -from nornir.core.plugins.inventory import InventoryPluginRegister - -# from nornir.core.task import Result, Task -# from nornir_nautobot.exceptions import NornirNautobotException -from nornir_netmiko import netmiko_send_command # from nautobot_device_onboarding.constants import PLATFORM_COMMAND_MAP +from nautobot_device_onboarding.constants import PLATFORM_COMMAND_MAP from nautobot_device_onboarding.diffsync.adapters.network_importer_adapters import ( NetworkImporterNautobotAdapter, NetworkImporterNetworkAdapter, @@ -36,7 +28,7 @@ from nautobot_device_onboarding.nornir_plays.command_getter import netmiko_send_commands from nautobot_device_onboarding.nornir_plays.empty_inventory import EmptyInventory from nautobot_device_onboarding.nornir_plays.logger import NornirLogger -from nautobot_device_onboarding.nornir_plays.processor import ProcessorDO +from nautobot_device_onboarding.nornir_plays.processor import ProcessorDO, ProcessorDONew from nautobot_device_onboarding.utils.formatter import ( normalize_interface_name, normalize_interface_type, @@ -44,6 +36,17 @@ ) from nautobot_device_onboarding.utils.helper import get_job_filter from nautobot_device_onboarding.utils.inventory_creator import _set_inventory +from nautobot_plugin_nornir.constants import NORNIR_SETTINGS +from nautobot_plugin_nornir.plugins.inventory.nautobot_orm import NautobotORMInventory +from nautobot_ssot.jobs.base import DataSource +from nornir import InitNornir +from 
nornir.core.plugins.inventory import InventoryPluginRegister +from nornir.core.task import Result, Task +from nornir_nautobot.exceptions import NornirNautobotException + +# from nornir.core.task import Result, Task +# from nornir_nautobot.exceptions import NornirNautobotException +from nornir_netmiko import netmiko_send_command InventoryPluginRegister.register("nautobot-inventory", NautobotORMInventory) InventoryPluginRegister.register("empty-inventory", EmptyInventory) @@ -533,13 +536,14 @@ def run(self, *args, **kwargs): "plugin": "empty-inventory", }, ) as nornir_obj: - nr_with_processors = nornir_obj.with_processors([ProcessorDO(logger, compiled_results)]) + nr_with_processors = nornir_obj.with_processors([ProcessorDONew(logger, compiled_results)]) for entered_ip in self.ip_addresses: single_host_inventory_constructed = _set_inventory( entered_ip, self.platform, self.port, self.secrets_group ) nr_with_processors.inventory.hosts.update(single_host_inventory_constructed) - nr_with_processors.run(task=netmiko_send_commands) + nr_result_temp = nr_with_processors.run(task=netmiko_send_commands) + print(nr_result_temp) final_result = self._process_result(compiled_results, self.ip_addresses) # Remove before final merge # diff --git a/nautobot_device_onboarding/nornir_plays/processor.py b/nautobot_device_onboarding/nornir_plays/processor.py index 5dc2d5df..aa2b7768 100755 --- a/nautobot_device_onboarding/nornir_plays/processor.py +++ b/nautobot_device_onboarding/nornir_plays/processor.py @@ -2,13 +2,17 @@ from typing import Dict +from nautobot_device_onboarding.utils.formatter import ( + format_ob_data, + format_ob_data_ios, + format_ob_data_junos, + format_ob_data_nxos, +) from nornir.core.inventory import Host from nornir.core.task import AggregatedResult, MultiResult, Task from nornir_nautobot.exceptions import NornirNautobotException from nornir_nautobot.plugins.processors import BaseLoggingProcessor -from nautobot_device_onboarding.utils.formatter import 
format_ob_data_ios, format_ob_data_junos, format_ob_data_nxos - class ProcessorDO(BaseLoggingProcessor): """Processor class for Device Onboarding jobs.""" @@ -101,3 +105,81 @@ def subtask_instance_started(self, task: Task, host: Host) -> None: self.logger.info(f"Subtask starting {task.name}.", extra={"object": task.host}) self.data[task.name] = {} # self.data[task.name][host.name] = {"started": True} + + +class ProcessorDONew(BaseLoggingProcessor): + """Processor class for Device Onboarding jobs.""" + + def __init__(self, logger, command_outputs): + """Set logging facility.""" + self.logger = logger + self.data: Dict = command_outputs + + # def task_started(self, task: Task) -> None: + # """Boilerplate Nornir processor for task_started.""" + # self.data[task.name] = {} + # # self.data[task.name]["started"] = True + # self.logger.info(f"Task Name: {task.name} started") + + # def task_completed(self, task: Task, result: AggregatedResult) -> None: + # """Boilerplate Nornir processor for task_instance_completed.""" + # # self.data[task.name]["completed"] = True + # self.logger.info(f"Task Name: {task.name} completed") + + # def task_instance_started(self, task: Task, host: Host) -> None: + # """Processor for Logging on Task Start.""" + # self.logger.info(f"Starting {task.name}.", extra={"object": task.host}) + # self.data[host.name] = {task.name: ""} + # # self.data[task.name][host.name] = {} + + def task_instance_completed(self, task: Task, host: Host, result: MultiResult) -> None: + """Nornir processor task completion for OS upgrades. + + Args: + task (Task): Nornir task individual object + host (Host): Host object with Nornir + result (MultiResult): Result from Nornir task + + Returns: + None + """ + # Complex logic to see if the task exception is expected, which is depicted by + # a sub task raising a NornirNautobotException. 
+ if result.failed: + for level_1_result in result: + if hasattr(level_1_result, "exception") and hasattr(level_1_result.exception, "result"): + for level_2_result in level_1_result.exception.result: # type: ignore + if isinstance(level_2_result.exception, NornirNautobotException): + return + self.logger.critical(f"{task.name} failed: {result.exception}", extra={"object": task.host}) + else: + self.logger.info(f"task_instance_completed Task Name: {task.name} Task Result: {result.result}", extra={"object": task.host}) + + # self.data[host.name][task.name] = { + # "completed": True, + # "failed": result.failed, + # } + + def subtask_instance_completed(self, task: Task, host: Host, result: MultiResult) -> None: + """Processor for Logging on SubTask Completed.""" + self.logger.info(f"subtask_instance_completed Subtask completed {task.name}.", extra={"object": task.host}) + self.logger.info(f"subtask_instance_completed Subtask result {result.result}.", extra={"object": task.host}) + + self.data[host.name][task.name].update({ + "failed": result.failed, + "subtask_result": result.result, + }) + + formatted_data = format_ob_data(host, result) + print(formatted_data) + self.data[host.name][task.name] = formatted_data + + def subtask_instance_started(self, task: Task, host: Host) -> None: # show command start + """Processor for Logging on SubTask Start.""" + self.logger.info(f"subtask_instance_started Subtask starting {task.name}.", extra={"object": task.host}) + self.data[host.name] = { + "platform": host.platform, + "manufacturer": host.platform.split("_")[0].title() if host.platform else "PLACEHOLDER", + "network_driver": host.platform, + } + self.data[host.name].update({task.name:{}}) diff --git a/nautobot_device_onboarding/utils/formatter.py b/nautobot_device_onboarding/utils/formatter.py old mode 100644 new mode 100755 index 369fb700..04bd0b4f --- a/nautobot_device_onboarding/utils/formatter.py +++ b/nautobot_device_onboarding/utils/formatter.py @@ -1,4 +1,64 @@ 
"""Formatter.""" +import os + +import yaml +from django.template import engines +from jdiff import extract_data_from_json +from jinja2 import FileSystemLoader +from jinja2.sandbox import SandboxedEnvironment + +DATA_DIR = os.path.abspath(os.path.join(os.path.dirname(os.path.dirname(__file__)), "command_mappers")) + + +def load_yaml_datafile(filename, config=None): + """Get the contents of the given YAML data file. + + Args: + filename (str): Filename within the 'data' directory. + config (dict): Data for Jinja2 templating. + """ + file_path = os.path.join(DATA_DIR, filename) + if not os.path.isfile(file_path): + raise RuntimeError(f"No data file found at {file_path}") + if not config: + config = {} + jinja_env = SandboxedEnvironment( + loader=FileSystemLoader(DATA_DIR), autoescape=True, trim_blocks=True, lstrip_blocks=False + ) + jinja_env.filters = engines["jinja"].env.filters + template = jinja_env.get_template(filename) + print(template) + populated = template.render(config) + return yaml.safe_load(populated) + + +def format_ob_data(host, multi_result): + """_summary_ + + Args: + host (_type_): _description_ + result (_type_): _description_ + + result is a MultiResult Nornir Object for a single host. 
+ """ + default_dict = { + "hostname": "", + "serial": "", + "device_type": "", + "mgmt_interface": "", + "manufacturer": "", + "platform": "", + "network_driver": "", + "mask_length": 0, + } + host_platform = host.platform + if host_platform == "cisco_xe": + host_platform = "cisco_ios" + command_jpaths = load_yaml_datafile(f"{host_platform}.yml", config={}) + for default_dict_field, command_info in command_jpaths['device_onboarding'].items(): + extracted_value = extract_data_from_json(multi_result.result, command_info['jpath'], exclude=None) + default_dict[default_dict_field] = extracted_value + return default_dict from nautobot_device_onboarding.constants import ( CISCO_INTERFACE_ABBREVIATIONS, @@ -64,6 +124,7 @@ def format_ob_data_nxos(host, result): break return formatted_data + return formatted_data def format_ob_data_junos(host, result): """Format the data for onboarding Juniper JUNOS devices.""" diff --git a/poetry.lock b/poetry.lock old mode 100644 new mode 100755 index 80918504..0dfe16ad --- a/poetry.lock +++ b/poetry.lock @@ -1,9 +1,10 @@ -# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.4.0 and should not be changed by hand. [[package]] name = "amqp" version = "5.2.0" description = "Low-level AMQP client for Python (fork of amqplib)." +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -18,6 +19,7 @@ vine = ">=5.0.0,<6.0.0" name = "aniso8601" version = "7.0.0" description = "A library for parsing ISO 8601 strings." 
+category = "main" optional = false python-versions = "*" files = [ @@ -29,6 +31,7 @@ files = [ name = "anyio" version = "4.2.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -51,6 +54,7 @@ trio = ["trio (>=0.23)"] name = "asgiref" version = "3.7.2" description = "ASGI specs, helper code, and adapters" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -68,6 +72,7 @@ tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"] name = "astroid" version = "2.15.8" description = "An abstract syntax tree for Python with inference support." +category = "dev" optional = false python-versions = ">=3.7.2" files = [ @@ -87,6 +92,7 @@ wrapt = [ name = "async-timeout" version = "4.0.3" description = "Timeout context manager for asyncio programs" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -98,6 +104,7 @@ files = [ name = "attrs" version = "23.2.0" description = "Classes Without Boilerplate" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -132,6 +139,7 @@ tomli = "*" name = "backports-zoneinfo" version = "0.2.1" description = "Backport of the standard library zoneinfo module" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -163,6 +171,7 @@ tzdata = ["tzdata"] name = "bandit" version = "1.7.7" description = "Security oriented static analyser for python code." 
+category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -186,6 +195,7 @@ yaml = ["PyYAML"] name = "bcrypt" version = "4.1.2" description = "Modern password hashing for your software and your servers" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -226,6 +236,7 @@ typecheck = ["mypy"] name = "billiard" version = "4.2.0" description = "Python multiprocessing fork with improvements and bugfixes" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -237,6 +248,7 @@ files = [ name = "black" version = "24.2.0" description = "The uncompromising code formatter." +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -283,6 +295,7 @@ uvloop = ["uvloop (>=0.15.2)"] name = "celery" version = "5.3.6" description = "Distributed Task Queue." +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -339,6 +352,7 @@ zstd = ["zstandard (==0.22.0)"] name = "certifi" version = "2024.2.2" description = "Python package for providing Mozilla's CA Bundle." +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -350,6 +364,7 @@ files = [ name = "cffi" version = "1.16.0" description = "Foreign Function Interface for Python calling C code." +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -414,6 +429,7 @@ pycparser = "*" name = "charset-normalizer" version = "3.3.2" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
+category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -513,6 +529,7 @@ files = [ name = "click" version = "8.1.7" description = "Composable command line interface toolkit" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -527,6 +544,7 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""} name = "click-didyoumean" version = "0.3.0" description = "Enables git-like *did-you-mean* feature in click" +category = "main" optional = false python-versions = ">=3.6.2,<4.0.0" files = [ @@ -541,6 +559,7 @@ click = ">=7" name = "click-plugins" version = "1.1.1" description = "An extension module for click to enable registering CLI commands via setuptools entry-points." +category = "main" optional = false python-versions = "*" files = [ @@ -558,6 +577,7 @@ dev = ["coveralls", "pytest (>=3.6)", "pytest-cov", "wheel"] name = "click-repl" version = "0.3.0" description = "REPL plugin for Click" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -576,6 +596,7 @@ testing = ["pytest (>=7.2.1)", "pytest-cov (>=4.0.0)", "tox (>=4.4.3)"] name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." +category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ @@ -587,6 +608,7 @@ files = [ name = "coverage" version = "7.4.1" description = "Code coverage measurement for Python" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -651,6 +673,7 @@ toml = ["tomli"] name = "cron-descriptor" version = "1.4.3" description = "A Python library that converts cron expressions into human readable strings." +category = "main" optional = false python-versions = "*" files = [ @@ -665,6 +688,7 @@ dev = ["polib"] name = "cryptography" version = "42.0.2" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
+category = "main" optional = false python-versions = ">=3.7" files = [ @@ -715,10 +739,30 @@ ssh = ["bcrypt (>=3.1.5)"] test = ["certifi", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] test-randomorder = ["pytest-randomly"] +[[package]] +name = "deepdiff" +version = "6.7.1" +description = "Deep Difference and Search of any Python object/data. Recreate objects by adding adding deltas to each other." +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "deepdiff-6.7.1-py3-none-any.whl", hash = "sha256:58396bb7a863cbb4ed5193f548c56f18218060362311aa1dc36397b2f25108bd"}, + {file = "deepdiff-6.7.1.tar.gz", hash = "sha256:b367e6fa6caac1c9f500adc79ada1b5b1242c50d5f716a1a4362030197847d30"}, +] + +[package.dependencies] +ordered-set = ">=4.0.2,<4.2.0" + +[package.extras] +cli = ["click (==8.1.3)", "pyyaml (==6.0.1)"] +optimize = ["orjson"] + [[package]] name = "defusedxml" version = "0.7.1" description = "XML bomb protection for Python stdlib modules" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -730,6 +774,7 @@ files = [ name = "diffsync" version = "1.10.0" description = "Library to easily sync/diff/update 2 different data sources" +category = "main" optional = false python-versions = ">=3.8,<4.0" files = [ @@ -751,6 +796,7 @@ redis = ["redis (>=4.3,<5.0)"] name = "dill" version = "0.3.8" description = "serialize all of Python" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -766,6 +812,7 @@ profile = ["gprof2dot (>=2022.7.29)"] name = "django" version = "3.2.24" description = "A high-level Python Web framework that encourages rapid development and clean, pragmatic design." 
+category = "main" optional = false python-versions = ">=3.6" files = [ @@ -786,6 +833,7 @@ bcrypt = ["bcrypt"] name = "django-ajax-tables" version = "1.1.1" description = "Django tag for ajax-enabled tables" +category = "main" optional = false python-versions = "*" files = [ @@ -797,6 +845,7 @@ files = [ name = "django-celery-beat" version = "2.5.0" description = "Database-backed Periodic Tasks." +category = "main" optional = false python-versions = "*" files = [ @@ -817,6 +866,7 @@ tzdata = "*" name = "django-celery-results" version = "2.4.0" description = "Celery result backends for Django." +category = "main" optional = false python-versions = "*" files = [ @@ -831,6 +881,7 @@ celery = ">=5.2.3,<6.0" name = "django-constance" version = "2.9.1" description = "Django live settings with pluggable backends, including Redis." +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -849,6 +900,7 @@ redis = ["redis"] name = "django-cors-headers" version = "4.2.0" description = "django-cors-headers is a Django application for handling the server headers required for Cross-Origin Resource Sharing (CORS)." +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -863,6 +915,7 @@ Django = ">=3.2" name = "django-db-file-storage" version = "0.5.6.1" description = "Custom FILE_STORAGE for Django. Saves files in your database instead of your file system." +category = "main" optional = false python-versions = "*" files = [ @@ -877,6 +930,7 @@ Django = "*" name = "django-debug-toolbar" version = "4.3.0" description = "A configurable set of panels that display various debug information about the current request/response." 
+category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -892,6 +946,7 @@ sqlparse = ">=0.2" name = "django-extensions" version = "3.2.3" description = "Extensions for Django" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -906,6 +961,7 @@ Django = ">=3.2" name = "django-filter" version = "23.1" description = "Django-filter is a reusable Django application for allowing users to filter querysets dynamically." +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -920,6 +976,7 @@ Django = ">=3.2" name = "django-health-check" version = "3.17.0" description = "Run checks on services like databases, queue servers, celery processes, etc." +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -938,6 +995,7 @@ test = ["celery", "pytest", "pytest-cov", "pytest-django", "redis"] name = "django-jinja" version = "2.10.2" description = "Jinja2 templating language integrated in Django." +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -953,6 +1011,7 @@ jinja2 = ">=3" name = "django-picklefield" version = "3.1" description = "Pickled object field for Django" +category = "main" optional = false python-versions = ">=3" files = [ @@ -970,6 +1029,7 @@ tests = ["tox"] name = "django-prometheus" version = "2.3.1" description = "Django middlewares to monitor your application with Prometheus.io." +category = "main" optional = false python-versions = "*" files = [ @@ -984,6 +1044,7 @@ prometheus-client = ">=0.7" name = "django-redis" version = "5.3.0" description = "Full featured redis cache backend for Django." 
+category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1019,6 +1080,7 @@ sqlparse = "*" name = "django-tables2" version = "2.6.0" description = "Table/data-grid framework for Django" +category = "main" optional = false python-versions = "*" files = [ @@ -1036,6 +1098,7 @@ tablib = ["tablib"] name = "django-taggit" version = "4.0.0" description = "django-taggit is a reusable Django application for simple tagging." +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1050,6 +1113,7 @@ Django = ">=3.2" name = "django-timezone-field" version = "5.1" description = "A Django app providing DB, form, and REST framework fields for zoneinfo and pytz timezone objects." +category = "main" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -1059,13 +1123,14 @@ files = [ [package.dependencies] "backports.zoneinfo" = {version = ">=0.2.1,<0.3.0", markers = "python_version < \"3.9\""} -Django = ">=2.2,<3.0.dev0 || >=3.2.dev0,<5.0" +Django = ">=2.2,<3.0.0 || >=3.2.0,<5.0" pytz = "*" [[package]] name = "django-tree-queries" version = "0.16.1" description = "Tree queries with explicit opt-in, without configurability" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1080,6 +1145,7 @@ tests = ["coverage"] name = "django-webserver" version = "1.2.0" description = "Django management commands for production webservers" +category = "main" optional = false python-versions = "*" files = [ @@ -1101,6 +1167,7 @@ waitress = ["waitress"] name = "djangorestframework" version = "3.14.0" description = "Web APIs for Django, made easy." 
+category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1116,6 +1183,7 @@ pytz = "*" name = "drf-react-template-framework" version = "0.0.17" description = "Django REST Framework plugin that creates form schemas for react-jsonschema-form" +category = "main" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -1130,6 +1198,7 @@ djangorestframework = ">=3.12.0,<4.0.0" name = "drf-spectacular" version = "0.26.3" description = "Sane and flexible OpenAPI 3 schema generation for Django REST framework" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1154,6 +1223,7 @@ sidecar = ["drf-spectacular-sidecar"] name = "drf-spectacular-sidecar" version = "2024.2.1" description = "Serve self-contained distribution builds of Swagger UI and Redoc with Django" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1168,6 +1238,7 @@ Django = ">=2.2" name = "emoji" version = "2.8.0" description = "Emoji for Python" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -1182,6 +1253,7 @@ dev = ["coverage", "coveralls", "pytest"] name = "exceptiongroup" version = "1.2.0" description = "Backport of PEP 654 (exception groups)" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1196,6 +1268,7 @@ test = ["pytest (>=6)"] name = "flake8" version = "5.0.4" description = "the modular source code checker: pep8 pyflakes and co" +category = "dev" optional = false python-versions = ">=3.6.1" files = [ @@ -1212,6 +1285,7 @@ pyflakes = ">=2.5.0,<2.6.0" name = "future" version = "0.18.3" description = "Clean single-source support for Python 3 and 2" +category = "main" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -1222,6 +1296,7 @@ files = [ name = "ghp-import" version = "2.1.0" description = "Copy your docs directly to the gh-pages branch." 
+category = "dev" optional = false python-versions = "*" files = [ @@ -1239,6 +1314,7 @@ dev = ["flake8", "markdown", "twine", "wheel"] name = "gitdb" version = "4.0.11" description = "Git Object Database" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1253,6 +1329,7 @@ smmap = ">=3.0.1,<6" name = "gitpython" version = "3.1.41" description = "GitPython is a Python library used to interact with Git repositories" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1281,6 +1358,7 @@ files = [ name = "graphene" version = "2.1.9" description = "GraphQL Framework for Python" +category = "main" optional = false python-versions = "*" files = [ @@ -1303,6 +1381,7 @@ test = ["coveralls", "fastdiff (==0.2.0)", "iso8601", "mock", "promise", "pytest name = "graphene-django" version = "2.16.0" description = "Graphene Django integration" +category = "main" optional = false python-versions = "*" files = [ @@ -1327,6 +1406,7 @@ test = ["coveralls", "django-filter (>=2)", "djangorestframework (>=3.6.3)", "mo name = "graphene-django-optimizer" version = "0.8.0" description = "Optimize database access inside graphene queries." +category = "main" optional = false python-versions = "*" files = [ @@ -1337,6 +1417,7 @@ files = [ name = "graphql-core" version = "2.3.2" description = "GraphQL implementation for Python" +category = "main" optional = false python-versions = "*" files = [ @@ -1357,6 +1438,7 @@ test = ["coveralls (==1.11.1)", "cython (==0.29.17)", "gevent (==1.5.0)", "pyann name = "graphql-relay" version = "2.0.1" description = "Relay implementation for Python" +category = "main" optional = false python-versions = "*" files = [ @@ -1373,6 +1455,7 @@ six = ">=1.12" name = "griffe" version = "0.40.1" description = "Signatures for entire Python programs. Extract the structure, the frame, the skeleton of your project, to generate API documentation or find breaking changes in your API." 
+category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1387,6 +1470,7 @@ colorama = ">=0.4" name = "h11" version = "0.14.0" description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1398,6 +1482,7 @@ files = [ name = "httpcore" version = "0.17.3" description = "A minimal low-level HTTP client." +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1409,16 +1494,17 @@ files = [ anyio = ">=3.0,<5.0" certifi = "*" h11 = ">=0.13,<0.15" -sniffio = "==1.*" +sniffio = ">=1.0.0,<2.0.0" [package.extras] http2 = ["h2 (>=3,<5)"] -socks = ["socksio (==1.*)"] +socks = ["socksio (>=1.0.0,<2.0.0)"] [[package]] name = "httpx" version = "0.24.1" description = "The next generation HTTP client." +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1434,14 +1520,15 @@ sniffio = "*" [package.extras] brotli = ["brotli", "brotlicffi"] -cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] +cli = ["click (>=8.0.0,<9.0.0)", "pygments (>=2.0.0,<3.0.0)", "rich (>=10,<14)"] http2 = ["h2 (>=3,<5)"] -socks = ["socksio (==1.*)"] +socks = ["socksio (>=1.0.0,<2.0.0)"] [[package]] name = "idna" version = "3.6" description = "Internationalized Domain Names in Applications (IDNA)" +category = "main" optional = false python-versions = ">=3.5" files = [ @@ -1453,6 +1540,7 @@ files = [ name = "importlib-metadata" version = "4.13.0" description = "Read metadata from Python packages" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1472,6 +1560,7 @@ testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packag name = "importlib-resources" version = "5.13.0" description = "Read resources from Python packages" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1490,6 +1579,7 @@ testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", name = 
"inflection" version = "0.5.1" description = "A port of Ruby on Rails inflector to Python" +category = "main" optional = false python-versions = ">=3.5" files = [ @@ -1501,6 +1591,7 @@ files = [ name = "invoke" version = "2.2.0" description = "Pythonic task execution" +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1512,6 +1603,7 @@ files = [ name = "isort" version = "5.13.2" description = "A Python utility / library to sort Python imports." +category = "dev" optional = false python-versions = ">=3.8.0" files = [ @@ -1522,10 +1614,27 @@ files = [ [package.extras] colors = ["colorama (>=0.4.6)"] +[[package]] +name = "jdiff" +version = "0.0.6" +description = "A light-weight library to compare structured output from network devices show commands." +category = "main" +optional = false +python-versions = ">=3.8,<4.0" +files = [ + {file = "jdiff-0.0.6-py3-none-any.whl", hash = "sha256:346798820be11ae2485ce2a29eb9a9cc0ddaa23388319566d367be18730cbaa8"}, + {file = "jdiff-0.0.6.tar.gz", hash = "sha256:b42d26947d24fe7c297c8e3d38709b6e78823a41dcf50417d6be916d7d49be45"}, +] + +[package.dependencies] +deepdiff = ">=5.5.0,<7.0" +jmespath = ">=1.0.1,<2.0.0" + [[package]] name = "jinja2" version = "3.1.3" description = "A very fast and expressive template engine." 
+category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1539,10 +1648,23 @@ MarkupSafe = ">=2.0" [package.extras] i18n = ["Babel (>=2.7)"] +[[package]] +name = "jmespath" +version = "1.0.1" +description = "JSON Matching Expressions" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"}, + {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, +] + [[package]] name = "jsonschema" version = "4.18.6" description = "An implementation of JSON Schema validation for Python" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1566,6 +1688,7 @@ format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339- name = "jsonschema-specifications" version = "2023.12.1" description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1581,6 +1704,7 @@ referencing = ">=0.31.0" name = "junos-eznc" version = "2.7.0" description = "Junos 'EZ' automation for non-programmers" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1605,6 +1729,7 @@ yamlordereddictloader = "*" name = "kombu" version = "5.3.5" description = "Messaging library for Python." +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1639,6 +1764,7 @@ zookeeper = ["kazoo (>=2.8.0)"] name = "lazy-object-proxy" version = "1.10.0" description = "A fast and thorough lazy object proxy." +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1685,6 +1811,7 @@ files = [ name = "lxml" version = "5.1.0" description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." 
+category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1778,6 +1905,7 @@ source = ["Cython (>=3.0.7)"] name = "markdown" version = "3.3.7" description = "Python implementation of Markdown." +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1795,6 +1923,7 @@ testing = ["coverage", "pyyaml"] name = "markdown-it-py" version = "3.0.0" description = "Python port of markdown-it. Markdown parsing, done right!" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1819,6 +1948,7 @@ testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] name = "markupsafe" version = "2.1.5" description = "Safely add untrusted strings to HTML/XML markup." +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1888,6 +2018,7 @@ files = [ name = "mccabe" version = "0.7.0" description = "McCabe checker, plugin for flake8" +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1899,6 +2030,7 @@ files = [ name = "mdurl" version = "0.1.2" description = "Markdown URL utilities" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1910,6 +2042,7 @@ files = [ name = "mergedeep" version = "1.3.4" description = "A deep merge function for 🐍." +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1921,6 +2054,7 @@ files = [ name = "mkdocs" version = "1.5.2" description = "Project documentation with Markdown." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1952,6 +2086,7 @@ min-versions = ["babel (==2.9.0)", "click (==7.0)", "colorama (==0.4)", "ghp-imp name = "mkdocs-autorefs" version = "0.5.0" description = "Automatically link across pages in MkDocs." 
+category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1967,6 +2102,7 @@ mkdocs = ">=1.1" name = "mkdocs-material" version = "9.1.15" description = "Documentation that simply works" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1989,6 +2125,7 @@ requests = ">=2.26" name = "mkdocs-material-extensions" version = "1.3.1" description = "Extension pack for Python Markdown and MkDocs Material." +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2000,6 +2137,7 @@ files = [ name = "mkdocs-version-annotations" version = "1.0.0" description = "MkDocs plugin to add custom admonitions for documenting version differences" +category = "dev" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -2011,6 +2149,7 @@ files = [ name = "mkdocstrings" version = "0.22.0" description = "Automatic documentation from sources, for MkDocs." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2037,6 +2176,7 @@ python-legacy = ["mkdocstrings-python-legacy (>=0.2.1)"] name = "mkdocstrings-python" version = "1.5.2" description = "A Python handler for mkdocstrings." +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2052,6 +2192,7 @@ mkdocstrings = ">=0.20" name = "mypy-extensions" version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." +category = "main" optional = false python-versions = ">=3.5" files = [ @@ -2063,6 +2204,7 @@ files = [ name = "napalm" version = "4.1.0" description = "Network Automation and Programmability Abstraction Layer with Multivendor support" +category = "main" optional = false python-versions = "*" files = [ @@ -2095,6 +2237,7 @@ typing-extensions = ">=4.3.0" name = "nautobot" version = "2.1.4" description = "Source of truth and network automation platform." 
+category = "main" optional = false python-versions = ">=3.8,<3.12" files = [ @@ -2159,6 +2302,7 @@ sso = ["social-auth-core[openidconnect,saml] (>=4.4.2,<4.5.0)"] name = "nautobot-plugin-nornir" version = "2.0.0" description = "Nautobot Nornir plugin." +category = "main" optional = false python-versions = ">=3.8,<3.12" files = [ @@ -2177,6 +2321,7 @@ nautobot = ["nautobot (>=2.0.0,<3.0.0)"] name = "nautobot-ssot" version = "2.2.0" description = "Nautobot Single Source of Truth" +category = "main" optional = false python-versions = ">=3.8,<3.12" files = [ @@ -2207,6 +2352,7 @@ servicenow = ["Jinja2 (>=2.11.3)", "PyYAML (>=6)", "ijson (>=2.5.1)", "oauthlib name = "ncclient" version = "0.6.15" description = "Python library for NETCONF clients" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -2223,6 +2369,7 @@ six = "*" name = "netaddr" version = "0.8.0" description = "A network address manipulation library for Python" +category = "main" optional = false python-versions = "*" files = [ @@ -2234,6 +2381,7 @@ files = [ name = "netmiko" version = "4.3.0" description = "Multi-vendor library to simplify legacy CLI connections to network devices" +category = "main" optional = false python-versions = ">=3.8,<4.0" files = [ @@ -2253,6 +2401,7 @@ textfsm = ">=1.1.3" name = "netutils" version = "1.6.0" description = "Common helper functions useful in network automation." +category = "main" optional = false python-versions = ">=3.8,<4.0" files = [ @@ -2267,6 +2416,7 @@ optionals = ["jsonschema (>=4.17.3,<5.0.0)", "napalm (>=4.0.0,<5.0.0)"] name = "nh3" version = "0.2.15" description = "Python bindings to the ammonia HTML sanitization library." 
+category = "main" optional = false python-versions = "*" files = [ @@ -2292,6 +2442,7 @@ files = [ name = "nornir" version = "3.4.1" description = "Pluggable multi-threaded framework with inventory management to help operate collections of devices" +category = "main" optional = false python-versions = ">=3.8,<4.0" files = [ @@ -2308,6 +2459,7 @@ mypy_extensions = ">=1.0.0,<2.0.0" name = "nornir-jinja2" version = "0.2.0" description = "Jinja2 plugins for nornir" +category = "main" optional = false python-versions = ">=3.6,<4.0" files = [ @@ -2323,6 +2475,7 @@ nornir = ">=3,<4" name = "nornir-napalm" version = "0.4.0" description = "NAPALM's plugins for nornir" +category = "main" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -2338,6 +2491,7 @@ nornir = ">=3,<4" name = "nornir-nautobot" version = "3.1.1" description = "Nornir Nautobot" +category = "main" optional = false python-versions = ">=3.8,<4.0" files = [ @@ -2363,6 +2517,7 @@ mikrotik-driver = ["routeros-api (>=0.17.0,<0.18.0)"] name = "nornir-netmiko" version = "1.0.1" description = "Netmiko's plugins for Nornir" +category = "main" optional = false python-versions = ">=3.8,<4.0" files = [ @@ -2377,6 +2532,7 @@ netmiko = ">=4.0.0,<5.0.0" name = "nornir-utils" version = "0.2.0" description = "Collection of plugins and functions for nornir that don't require external dependencies" +category = "main" optional = false python-versions = ">=3.6.2,<4.0.0" files = [ @@ -2392,6 +2548,7 @@ nornir = ">=3,<4" name = "ntc-templates" version = "4.3.0" description = "TextFSM Templates for Network Devices, and Python wrapper for TextFSM's CliTable." 
+category = "main" optional = false python-versions = ">=3.8,<4.0" files = [ @@ -2406,6 +2563,7 @@ textfsm = ">=1.1.0,<2.0.0" name = "oauthlib" version = "3.2.2" description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -2418,10 +2576,26 @@ rsa = ["cryptography (>=3.0.0)"] signals = ["blinker (>=1.4.0)"] signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] +[[package]] +name = "ordered-set" +version = "4.1.0" +description = "An OrderedSet is a custom MutableSet that remembers its order, so that every" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "ordered-set-4.1.0.tar.gz", hash = "sha256:694a8e44c87657c59292ede72891eb91d34131f6531463aab3009191c77364a8"}, + {file = "ordered_set-4.1.0-py3-none-any.whl", hash = "sha256:046e1132c71fcf3330438a539928932caf51ddbc582496833e23de611de14562"}, +] + +[package.extras] +dev = ["black", "mypy", "pytest"] + [[package]] name = "packaging" version = "23.2" description = "Core utilities for Python packages" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2433,6 +2607,7 @@ files = [ name = "paramiko" version = "3.4.0" description = "SSH2 protocol library" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -2454,6 +2629,7 @@ invoke = ["invoke (>=2.0)"] name = "pathspec" version = "0.12.1" description = "Utility library for gitignore style pattern matching of file paths." 
+category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2465,6 +2641,7 @@ files = [ name = "pbr" version = "6.0.0" description = "Python Build Reasonableness" +category = "dev" optional = false python-versions = ">=2.6" files = [ @@ -2476,6 +2653,7 @@ files = [ name = "pillow" version = "10.2.0" description = "Python Imaging Library (Fork)" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2561,6 +2739,7 @@ xmp = ["defusedxml"] name = "pkgutil-resolve-name" version = "1.3.10" description = "Resolve a name to an object." +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -2572,6 +2751,7 @@ files = [ name = "platformdirs" version = "4.2.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2587,6 +2767,7 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest- name = "prometheus-client" version = "0.17.1" description = "Python client for the Prometheus monitoring system." 
+category = "main" optional = false python-versions = ">=3.6" files = [ @@ -2601,6 +2782,7 @@ twisted = ["twisted"] name = "promise" version = "2.3" description = "Promises/A+ implementation for Python" +category = "main" optional = false python-versions = "*" files = [ @@ -2617,6 +2799,7 @@ test = ["coveralls", "futures", "mock", "pytest (>=2.7.3)", "pytest-benchmark", name = "prompt-toolkit" version = "3.0.43" description = "Library for building powerful interactive command lines in Python" +category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -2631,6 +2814,7 @@ wcwidth = "*" name = "psycopg2-binary" version = "2.9.9" description = "psycopg2 - Python-PostgreSQL Database Adapter" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2712,6 +2896,7 @@ files = [ name = "pycodestyle" version = "2.9.1" description = "Python style guide checker" +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2723,6 +2908,7 @@ files = [ name = "pycparser" version = "2.21" description = "C parser in Python" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -2734,6 +2920,7 @@ files = [ name = "pydantic" version = "1.10.14" description = "Data validation and settings management using python type hints" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2786,6 +2973,7 @@ email = ["email-validator (>=1.0.3)"] name = "pydocstyle" version = "6.3.0" description = "Python docstring style checker" +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2803,6 +2991,7 @@ toml = ["tomli (>=1.2.3)"] name = "pyeapi" version = "1.0.2" description = "Python Client for eAPI" +category = "main" optional = false python-versions = "*" files = [ @@ -2820,6 +3009,7 @@ test = ["coverage", "mock"] name = "pyflakes" version = "2.5.0" description = "passive checker of Python programs" +category = "dev" optional = false python-versions = 
">=3.6" files = [ @@ -2831,6 +3021,7 @@ files = [ name = "pygments" version = "2.17.2" description = "Pygments is a syntax highlighting package written in Python." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2846,6 +3037,7 @@ windows-terminal = ["colorama (>=0.4.6)"] name = "pyjwt" version = "2.8.0" description = "JSON Web Token implementation in Python" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2863,6 +3055,7 @@ tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] name = "pylint" version = "2.17.7" description = "python code static checker" +category = "dev" optional = false python-versions = ">=3.7.2" files = [ @@ -2892,6 +3085,7 @@ testutils = ["gitpython (>3)"] name = "pylint-django" version = "2.5.5" description = "A Pylint plugin to help Pylint understand the Django web framework" +category = "dev" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -2910,6 +3104,7 @@ with-django = ["Django (>=2.2)"] name = "pylint-nautobot" version = "0.2.1" description = "Custom Pylint Rules for Nautobot" +category = "dev" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -2927,6 +3122,7 @@ tomli = ">=2.0.1,<3.0.0" name = "pylint-plugin-utils" version = "0.8.2" description = "Utilities and helpers for writing Pylint plugins" +category = "dev" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -2941,6 +3137,7 @@ pylint = ">=1.7" name = "pymdown-extensions" version = "10.4" description = "Extension pack for Python Markdown." 
+category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2959,6 +3156,7 @@ extra = ["pygments (>=2.12)"] name = "pynacl" version = "1.5.0" description = "Python binding to the Networking and Cryptography (NaCl) library" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -2985,6 +3183,7 @@ tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"] name = "pynautobot" version = "2.0.2" description = "Nautobot API client library" +category = "main" optional = false python-versions = ">=3.8,<4.0" files = [ @@ -3001,6 +3200,7 @@ urllib3 = ">=1.21.1,<1.27" name = "pyparsing" version = "3.1.1" description = "pyparsing module - Classes and methods to define and execute parsing grammars" +category = "main" optional = false python-versions = ">=3.6.8" files = [ @@ -3015,6 +3215,7 @@ diagrams = ["jinja2", "railroad-diagrams"] name = "pyserial" version = "3.5" description = "Python Serial Port Extension" +category = "main" optional = false python-versions = "*" files = [ @@ -3029,6 +3230,7 @@ cp2110 = ["hidapi"] name = "python-crontab" version = "3.0.0" description = "Python Crontab API" +category = "main" optional = false python-versions = "*" files = [ @@ -3047,6 +3249,7 @@ cron-schedule = ["croniter"] name = "python-dateutil" version = "2.8.2" description = "Extensions to the standard Python datetime module" +category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ @@ -3061,6 +3264,7 @@ six = ">=1.5" name = "python-slugify" version = "8.0.4" description = "A Python slugify application that also handles Unicode" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3078,6 +3282,7 @@ unidecode = ["Unidecode (>=1.1.1)"] name = "python3-openid" version = "3.2.0" description = "OpenID support for modern servers and consumers." 
+category = "main" optional = false python-versions = "*" files = [ @@ -3096,6 +3301,7 @@ postgresql = ["psycopg2"] name = "pytz" version = "2024.1" description = "World timezone definitions, modern and historical" +category = "main" optional = false python-versions = "*" files = [ @@ -3107,6 +3313,7 @@ files = [ name = "pyuwsgi" version = "2.0.23.post0" description = "The uWSGI server" +category = "main" optional = false python-versions = "*" files = [ @@ -3159,6 +3366,7 @@ files = [ name = "pyyaml" version = "6.0.1" description = "YAML parser and emitter for Python" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -3218,6 +3426,7 @@ files = [ name = "pyyaml-env-tag" version = "0.1" description = "A custom YAML tag for referencing environment variables in YAML files. " +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -3232,6 +3441,7 @@ pyyaml = "*" name = "redis" version = "5.0.1" description = "Python client for Redis database and key-value store" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3250,6 +3460,7 @@ ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==20.0.1)", "requests (>=2.26.0)" name = "referencing" version = "0.33.0" description = "JSON Referencing + Python" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -3265,6 +3476,7 @@ rpds-py = ">=0.7.0" name = "regex" version = "2023.12.25" description = "Alternative regular expression module, to replace re." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3367,6 +3579,7 @@ files = [ name = "requests" version = "2.31.0" description = "Python HTTP for Humans." +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3388,6 +3601,7 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] name = "requests-oauthlib" version = "1.3.1" description = "OAuthlib authentication support for Requests." 
+category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -3406,6 +3620,7 @@ rsa = ["oauthlib[signedtoken] (>=3.0.0)"] name = "rich" version = "13.7.0" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" +category = "dev" optional = false python-versions = ">=3.7.0" files = [ @@ -3425,6 +3640,7 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"] name = "rpds-py" version = "0.18.0" description = "Python bindings to Rust's persistent data structures (rpds)" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -3533,6 +3749,7 @@ files = [ name = "ruamel-yaml" version = "0.18.6" description = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3551,6 +3768,7 @@ jinja2 = ["ruamel.yaml.jinja2 (>=0.2)"] name = "ruamel-yaml-clib" version = "0.2.8" description = "C version of reader, parser and emitter for ruamel.yaml derived from libyaml" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -3610,6 +3828,7 @@ files = [ name = "rx" version = "1.6.3" description = "Reactive Extensions (Rx) for Python" +category = "main" optional = false python-versions = "*" files = [ @@ -3620,6 +3839,7 @@ files = [ name = "scp" version = "0.14.5" description = "scp module for paramiko" +category = "main" optional = false python-versions = "*" files = [ @@ -3634,6 +3854,7 @@ paramiko = "*" name = "setuptools" version = "69.1.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -3650,6 +3871,7 @@ testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jar name = "singledispatch" version = "4.1.0" description = "Backport functools.singledispatch to older Pythons." 
+category = "main" optional = false python-versions = ">=3.8" files = [ @@ -3665,6 +3887,7 @@ testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", name = "six" version = "1.16.0" description = "Python 2 and 3 compatibility utilities" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -3676,6 +3899,7 @@ files = [ name = "smmap" version = "5.0.1" description = "A pure Python implementation of a sliding window memory map manager" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3687,6 +3911,7 @@ files = [ name = "sniffio" version = "1.3.0" description = "Sniff out which async library your code is running under" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3698,6 +3923,7 @@ files = [ name = "snowballstemmer" version = "2.2.0" description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." +category = "dev" optional = false python-versions = "*" files = [ @@ -3709,6 +3935,7 @@ files = [ name = "social-auth-app-django" version = "5.2.0" description = "Python Social Authentication, Django integration." +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3724,6 +3951,7 @@ social-auth-core = ">=4.4.1" name = "social-auth-core" version = "4.5.3" description = "Python social authentication made simple." +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -3750,6 +3978,7 @@ saml = ["python3-saml (>=1.5.0)"] name = "sqlparse" version = "0.4.4" description = "A non-validating SQL parser." 
+category = "main" optional = false python-versions = ">=3.5" files = [ @@ -3766,6 +3995,7 @@ test = ["pytest", "pytest-cov"] name = "stevedore" version = "5.1.0" description = "Manage dynamic plugins for Python applications" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -3780,6 +4010,7 @@ pbr = ">=2.0.0,<2.1.0 || >2.1.0" name = "structlog" version = "22.3.0" description = "Structured Logging for Python" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3797,6 +4028,7 @@ typing = ["mypy", "rich", "twisted"] name = "svgwrite" version = "1.4.3" description = "A Python library to create SVG drawings." +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -3808,6 +4040,7 @@ files = [ name = "text-unidecode" version = "1.3" description = "The most basic Text::Unidecode port" +category = "main" optional = false python-versions = "*" files = [ @@ -3819,6 +4052,7 @@ files = [ name = "textfsm" version = "1.1.3" description = "Python module for parsing semi-structured text into python tables." +category = "main" optional = false python-versions = "*" files = [ @@ -3834,6 +4068,7 @@ six = "*" name = "toml" version = "0.10.2" description = "Python Library for Tom's Obvious, Minimal Language" +category = "dev" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -3845,6 +4080,7 @@ files = [ name = "tomli" version = "2.0.1" description = "A lil' TOML parser" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3856,6 +4092,7 @@ files = [ name = "tomlkit" version = "0.12.3" description = "Style preserving TOML library" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3867,6 +4104,7 @@ files = [ name = "transitions" version = "0.9.0" description = "A lightweight, object-oriented Python state machine implementation with many extensions." 
+category = "main" optional = false python-versions = "*" files = [ @@ -3885,6 +4123,7 @@ test = ["pytest"] name = "ttp" version = "0.9.5" description = "Template Text Parser" +category = "main" optional = false python-versions = ">=2.7,<4.0" files = [ @@ -3900,6 +4139,7 @@ full = ["cerberus (>=1.3.0,<1.4.0)", "deepdiff (>=5.8.0,<5.9.0)", "jinja2 (>=3.0 name = "ttp-templates" version = "0.3.6" description = "Template Text Parser Templates collections" +category = "main" optional = false python-versions = ">=3.6,<4.0" files = [ @@ -3917,6 +4157,7 @@ docs = ["mkdocs (==1.2.4)", "mkdocs-material (==7.2.2)", "mkdocs-material-extens name = "typing-extensions" version = "4.9.0" description = "Backported and Experimental Type Hints for Python 3.8+" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -3928,6 +4169,7 @@ files = [ name = "tzdata" version = "2024.1" description = "Provider of IANA time zone data" +category = "main" optional = false python-versions = ">=2" files = [ @@ -3939,6 +4181,7 @@ files = [ name = "uritemplate" version = "4.1.1" description = "Implementation of RFC 6570 URI Templates" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -3950,6 +4193,7 @@ files = [ name = "urllib3" version = "1.26.18" description = "HTTP library with thread-safe connection pooling, file post, and more." +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ @@ -3966,6 +4210,7 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] name = "vine" version = "5.1.0" description = "Python promises." 
+category = "main" optional = false python-versions = ">=3.6" files = [ @@ -3977,6 +4222,7 @@ files = [ name = "watchdog" version = "4.0.0" description = "Filesystem events monitoring" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -4018,6 +4264,7 @@ watchmedo = ["PyYAML (>=3.10)"] name = "wcwidth" version = "0.2.13" description = "Measures the displayed width of unicode strings in a terminal" +category = "main" optional = false python-versions = "*" files = [ @@ -4029,6 +4276,7 @@ files = [ name = "wrapt" version = "1.16.0" description = "Module for decorators, wrappers and monkey patching." +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -4108,6 +4356,7 @@ files = [ name = "yamllint" version = "1.34.0" description = "A linter for YAML files." +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -4126,6 +4375,7 @@ dev = ["doc8", "flake8", "flake8-import-order", "rstcheck[sphinx]", "sphinx"] name = "yamlordereddictloader" version = "0.4.2" description = "YAML loader and dumper for PyYAML allowing to keep keys order." 
+category = "main" optional = false python-versions = "*" files = [ @@ -4140,6 +4390,7 @@ pyyaml = "*" name = "zipp" version = "3.17.0" description = "Backport of pathlib-compatible object wrapper for zip files" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -4154,4 +4405,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = ">=3.8,<3.12" -content-hash = "e94baa051993eec15b367e1a37faa54fb1099c3a96bd29d3841e2123d3414eed" +content-hash = "840fb06383c6c52bb58febbaf5a3d2487b8f46c0f82bb966cb62b6f8b83d3663" diff --git a/pyproject.toml b/pyproject.toml old mode 100644 new mode 100755 index 4d6dc55a..94ca30ad --- a/pyproject.toml +++ b/pyproject.toml @@ -32,6 +32,7 @@ zipp = "^3.4.0" nautobot = "^2.1.1" nautobot-ssot = "^2.2.0" nautobot-plugin-nornir = "2.0.0" +jdiff = "^0.0.6" [tool.poetry.group.dev.dependencies] From 60050bf20b3bd9f0131dad62b1716073f5f47c2d Mon Sep 17 00:00:00 2001 From: Jeff Kala Date: Thu, 15 Feb 2024 08:26:13 -0600 Subject: [PATCH 060/225] first work new parser with jdiff --- .../command_mappers/cisco_ios.yml | 17 +++++++ .../command_mappers/cisco_nxos.yml | 19 +++++++ .../command_mappers/juniper_junos.yml | 51 +++++++++++++++++++ nautobot_device_onboarding/jobs.py | 29 ++++------- .../nornir_plays/processor.py | 17 ++++--- nautobot_device_onboarding/utils/formatter.py | 26 ++++------ 6 files changed, 117 insertions(+), 42 deletions(-) create mode 100755 nautobot_device_onboarding/command_mappers/cisco_ios.yml create mode 100755 nautobot_device_onboarding/command_mappers/cisco_nxos.yml create mode 100755 nautobot_device_onboarding/command_mappers/juniper_junos.yml diff --git a/nautobot_device_onboarding/command_mappers/cisco_ios.yml b/nautobot_device_onboarding/command_mappers/cisco_ios.yml new file mode 100755 index 00000000..6b90f851 --- /dev/null +++ b/nautobot_device_onboarding/command_mappers/cisco_ios.yml @@ -0,0 +1,17 @@ +--- 
+device_onboarding: + hostname: + jpath: "[*].hostname" + command: "show version" + serial: + command: "show version" + jpath: "[*].serial[0]" + device_type: + command: "show inventory" + jpath: "[?name=='Chassis'].pid" + mgmt_interface: + command: "show interfaces" + jpath: "[?ip_address=='{{ host_info }}'].interface" + mask_length: + command: "show interfaces" + jpath: "[?ip_address=='{{ host_info }}'].prefix_length" diff --git a/nautobot_device_onboarding/command_mappers/cisco_nxos.yml b/nautobot_device_onboarding/command_mappers/cisco_nxos.yml new file mode 100755 index 00000000..b8ac919d --- /dev/null +++ b/nautobot_device_onboarding/command_mappers/cisco_nxos.yml @@ -0,0 +1,19 @@ +--- +# Temporary idea that could be via datasources eventually. +device_onboarding: + required_fields: + hostname: + command: "show version" + your_key: "hostname" + serial: + command: "show version" + your_key: "serial" + device_type: + command: "show version" + your_key: "platform" + mgmt_interface: + command: "show interfaces" + your_key: "interface" + mask_length: + command: "show interfaces" + your_key: "prefix_length" diff --git a/nautobot_device_onboarding/command_mappers/juniper_junos.yml b/nautobot_device_onboarding/command_mappers/juniper_junos.yml new file mode 100755 index 00000000..506879a3 --- /dev/null +++ b/nautobot_device_onboarding/command_mappers/juniper_junos.yml @@ -0,0 +1,51 @@ +--- +# Temporary idea that could be via datasources eventually. 
+device_onboarding: + required_fields: + hostname: + command: "show version" + your_key: "name" + serial: + command: "show version" + your_key: "serial" + device_type: + command: "show version" + your_key: "model" + mgmt_interface: + command: "show interfaces" + your_key: "interface" + mask_length: + command: "show interfaces" + your_key: "('destination').split("/")[1]" + # your_key: "{{destination | ipaddressip_network .split("/")[1] }}" + +# device_onboarding: +# - model: "dcim.Device" +# your_key: "{{ serial }}" +# model_key: "serial" +# source: +# parser: "ntc-templates" +# arg: "show version" +# - model: "dcim.Interface" +# your_key: "ethernet" +# model_key: "type" +# source: +# parser: "constant" +# - model: "dcim.Device" +# your_key: "{{ osversion }}" +# model_key: "cf_software_version" +# source: +# parser: "ntc-templates" +# arg: "show version" + + +# @kens_magic +# def device_serial(): +# pass + + +# def load(): +# for device in devices: +# serial = device_serial() + + diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py index 80424bc2..d1031922 100755 --- a/nautobot_device_onboarding/jobs.py +++ b/nautobot_device_onboarding/jobs.py @@ -508,14 +508,14 @@ class Meta: secrets_group = ObjectVar(model=SecretsGroup) platform = ObjectVar(model=Platform, required=False) - def _process_result(self, command_result, ip_addresses): - """Process the data returned from devices.""" - processed_device_data = {} - for ip_address in ip_addresses: - processed_device_data[ip_address] = command_result[ip_address] - if self.debug: - self.logger.debug(f"Processed CommandGetterDO return for {ip_address}: {command_result[ip_address]}") - return processed_device_data + # def _process_result(self, command_result, ip_addresses): + # """Process the data returned from devices.""" + # processed_device_data = {} + # for ip_address in ip_addresses: + # processed_device_data[ip_address] = command_result[ip_address] + # if self.debug: + # 
self.logger.debug(f"Processed CommandGetterDO return for {ip_address}: {command_result[ip_address]}") + # return processed_device_data def run(self, *args, **kwargs): """Process onboarding task from ssot-ni job.""" @@ -542,19 +542,12 @@ def run(self, *args, **kwargs): entered_ip, self.platform, self.port, self.secrets_group ) nr_with_processors.inventory.hosts.update(single_host_inventory_constructed) - nr_result_temp = nr_with_processors.run(task=netmiko_send_commands) - print(nr_result_temp) - final_result = self._process_result(compiled_results, self.ip_addresses) - - # Remove before final merge # - for host, data in nr_with_processors.inventory.hosts.items(): - self.logger.info("%s;\n%s", host, data.dict()) - # End # - + nr_with_processors.run(task=netmiko_send_commands) + # final_result = self._process_result(compiled_results, self.ip_addresses) except Exception as err: # pylint: disable=broad-exception-caught self.logger.info("Error: %s", err) return err - return final_result + return compiled_results class CommandGetterNetworkImporter(Job): diff --git a/nautobot_device_onboarding/nornir_plays/processor.py b/nautobot_device_onboarding/nornir_plays/processor.py index aa2b7768..f20def9d 100755 --- a/nautobot_device_onboarding/nornir_plays/processor.py +++ b/nautobot_device_onboarding/nornir_plays/processor.py @@ -3,7 +3,7 @@ from typing import Dict from nautobot_device_onboarding.utils.formatter import ( - format_ob_data, + extract_show_data, format_ob_data_ios, format_ob_data_junos, format_ob_data_nxos, @@ -165,14 +165,15 @@ def subtask_instance_completed(self, task: Task, host: Host, result: MultiResult self.logger.info(f"subtask_instance_completed Subtask completed {task.name}.", extra={"object": task.host}) self.logger.info(f"subtask_instance_completed Subtask result {result.result}.", extra={"object": task.host}) - self.data[host.name][task.name].update({ + self.data[host.name].update({ "failed": result.failed, - "subtask_result": result.result, + # 
"subtask_result": result.result, }) - - formatted_data = format_ob_data(host, result) - print(formatted_data) - self.data[host.name][task.name] = formatted_data + formatted_data = extract_show_data(host, result) + print(f"current formated data = {formatted_data}") + for k, v in formatted_data.items(): + self.data[host.name][k] = v + # self.data[host.name].update(formatted_data) def subtask_instance_started(self, task: Task, host: Host) -> None: # show command start """Processor for Logging on SubTask Start.""" @@ -182,4 +183,4 @@ def subtask_instance_started(self, task: Task, host: Host) -> None: # show comm "manufacturer": host.platform.split("_")[0].title() if host.platform else "PLACEHOLDER", "network_driver": host.platform, } - self.data[host.name].update({task.name:{}}) + # self.data[host.name].update({task.name:{}}) diff --git a/nautobot_device_onboarding/utils/formatter.py b/nautobot_device_onboarding/utils/formatter.py index 04bd0b4f..747a8800 100755 --- a/nautobot_device_onboarding/utils/formatter.py +++ b/nautobot_device_onboarding/utils/formatter.py @@ -27,12 +27,12 @@ def load_yaml_datafile(filename, config=None): ) jinja_env.filters = engines["jinja"].env.filters template = jinja_env.get_template(filename) - print(template) populated = template.render(config) + print(populated) return yaml.safe_load(populated) -def format_ob_data(host, multi_result): +def extract_show_data(host, multi_result): """_summary_ Args: @@ -41,24 +41,18 @@ def format_ob_data(host, multi_result): result is a MultiResult Nornir Object for a single host. 
""" - default_dict = { - "hostname": "", - "serial": "", - "device_type": "", - "mgmt_interface": "", - "manufacturer": "", - "platform": "", - "network_driver": "", - "mask_length": 0, - } host_platform = host.platform if host_platform == "cisco_xe": host_platform = "cisco_ios" - command_jpaths = load_yaml_datafile(f"{host_platform}.yml", config={}) + command_jpaths = load_yaml_datafile(f"{host_platform}.yml", config={"host_info": host}) + result_dict = {} for default_dict_field, command_info in command_jpaths['device_onboarding'].items(): - extracted_value = extract_data_from_json(multi_result.result, command_info['jpath'], exclude=None) - default_dict[default_dict_field] = extracted_value - return default_dict + if command_info["command"] == multi_result[0].name: + extracted_value = extract_data_from_json(multi_result[0].result, command_info['jpath']) + if isinstance(extracted_value, list) and len(extracted_value) == 1: + extracted_value = extracted_value[0] + result_dict[default_dict_field] = extracted_value + return result_dict from nautobot_device_onboarding.constants import ( CISCO_INTERFACE_ABBREVIATIONS, From a1e291d624bdbe19d8991c6b513ab26f163ab30a Mon Sep 17 00:00:00 2001 From: Jeff Kala Date: Thu, 15 Feb 2024 12:59:50 -0600 Subject: [PATCH 061/225] update structures --- .../command_mappers/arista_eos.yml | 17 +++++ .../command_mappers/cisco_nxos.yml | 32 +++++----- .../command_mappers/juniper_junos.yml | 64 +++++-------------- 3 files changed, 47 insertions(+), 66 deletions(-) create mode 100755 nautobot_device_onboarding/command_mappers/arista_eos.yml diff --git a/nautobot_device_onboarding/command_mappers/arista_eos.yml b/nautobot_device_onboarding/command_mappers/arista_eos.yml new file mode 100755 index 00000000..6b90f851 --- /dev/null +++ b/nautobot_device_onboarding/command_mappers/arista_eos.yml @@ -0,0 +1,17 @@ +--- +device_onboarding: + hostname: + jpath: "[*].hostname" + command: "show version" + serial: + command: "show version" + jpath: 
"[*].serial[0]" + device_type: + command: "show inventory" + jpath: "[?name=='Chassis'].pid" + mgmt_interface: + command: "show interfaces" + jpath: "[?ip_address=='{{ host_info }}'].interface" + mask_length: + command: "show interfaces" + jpath: "[?ip_address=='{{ host_info }}'].prefix_length" diff --git a/nautobot_device_onboarding/command_mappers/cisco_nxos.yml b/nautobot_device_onboarding/command_mappers/cisco_nxos.yml index b8ac919d..6b90f851 100755 --- a/nautobot_device_onboarding/command_mappers/cisco_nxos.yml +++ b/nautobot_device_onboarding/command_mappers/cisco_nxos.yml @@ -1,19 +1,17 @@ --- -# Temporary idea that could be via datasources eventually. device_onboarding: - required_fields: - hostname: - command: "show version" - your_key: "hostname" - serial: - command: "show version" - your_key: "serial" - device_type: - command: "show version" - your_key: "platform" - mgmt_interface: - command: "show interfaces" - your_key: "interface" - mask_length: - command: "show interfaces" - your_key: "prefix_length" + hostname: + jpath: "[*].hostname" + command: "show version" + serial: + command: "show version" + jpath: "[*].serial[0]" + device_type: + command: "show inventory" + jpath: "[?name=='Chassis'].pid" + mgmt_interface: + command: "show interfaces" + jpath: "[?ip_address=='{{ host_info }}'].interface" + mask_length: + command: "show interfaces" + jpath: "[?ip_address=='{{ host_info }}'].prefix_length" diff --git a/nautobot_device_onboarding/command_mappers/juniper_junos.yml b/nautobot_device_onboarding/command_mappers/juniper_junos.yml index 506879a3..6b90f851 100755 --- a/nautobot_device_onboarding/command_mappers/juniper_junos.yml +++ b/nautobot_device_onboarding/command_mappers/juniper_junos.yml @@ -1,51 +1,17 @@ --- -# Temporary idea that could be via datasources eventually. 
device_onboarding: - required_fields: - hostname: - command: "show version" - your_key: "name" - serial: - command: "show version" - your_key: "serial" - device_type: - command: "show version" - your_key: "model" - mgmt_interface: - command: "show interfaces" - your_key: "interface" - mask_length: - command: "show interfaces" - your_key: "('destination').split("/")[1]" - # your_key: "{{destination | ipaddressip_network .split("/")[1] }}" - -# device_onboarding: -# - model: "dcim.Device" -# your_key: "{{ serial }}" -# model_key: "serial" -# source: -# parser: "ntc-templates" -# arg: "show version" -# - model: "dcim.Interface" -# your_key: "ethernet" -# model_key: "type" -# source: -# parser: "constant" -# - model: "dcim.Device" -# your_key: "{{ osversion }}" -# model_key: "cf_software_version" -# source: -# parser: "ntc-templates" -# arg: "show version" - - -# @kens_magic -# def device_serial(): -# pass - - -# def load(): -# for device in devices: -# serial = device_serial() - - + hostname: + jpath: "[*].hostname" + command: "show version" + serial: + command: "show version" + jpath: "[*].serial[0]" + device_type: + command: "show inventory" + jpath: "[?name=='Chassis'].pid" + mgmt_interface: + command: "show interfaces" + jpath: "[?ip_address=='{{ host_info }}'].interface" + mask_length: + command: "show interfaces" + jpath: "[?ip_address=='{{ host_info }}'].prefix_length" From 03729affa23c5e216094fee597081e2c01e5cb60 Mon Sep 17 00:00:00 2001 From: Susan Hooks Date: Thu, 15 Feb 2024 20:27:51 +0000 Subject: [PATCH 062/225] please help --- nautobot_device_onboarding/jobs.py | 15 +++++++++++---- .../nornir_plays/processor.py | 8 ++++---- 2 files changed, 15 insertions(+), 8 deletions(-) diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py index 531ee437..c50fccc5 100644 --- a/nautobot_device_onboarding/jobs.py +++ b/nautobot_device_onboarding/jobs.py @@ -508,10 +508,17 @@ class Meta: def _process_result(self, command_result, 
ip_addresses): """Process the data returned from devices.""" processed_device_data = {} + print(command_result) for ip_address in ip_addresses: - processed_device_data[ip_address] = command_result[ip_address] - if self.debug: - self.logger.debug(f"Processed CommandGetterDO return for {ip_address}: {command_result[ip_address]}") + if command_result.get(ip_address): + processed_device_data[ip_address] = command_result[ip_address] + if self.debug: + self.logger.debug(f"Processed CommandGetterDO return for {ip_address}: {command_result[ip_address]}") + + if command_result[ip_address]["failed"]: + print("inside if") + processed_device_data[ip_address]["failed"] = True + return processed_device_data def run(self, *args, **kwargs): @@ -548,7 +555,7 @@ def run(self, *args, **kwargs): # End # except Exception as err: # pylint: disable=broad-exception-caught - self.logger.info("Error: %s", err) + self.logger.error("Error: %s", err) return err return final_result diff --git a/nautobot_device_onboarding/nornir_plays/processor.py b/nautobot_device_onboarding/nornir_plays/processor.py index 5dc2d5df..0a308d34 100755 --- a/nautobot_device_onboarding/nornir_plays/processor.py +++ b/nautobot_device_onboarding/nornir_plays/processor.py @@ -57,10 +57,10 @@ def task_instance_completed(self, task: Task, host: Host, result: MultiResult) - else: self.logger.info(f"Task Name: {task.name} Task Result: {result.result}", extra={"object": task.host}) - # self.data[task.name][host.name] = { - # "completed": True, - # "failed": result.failed, - # } + self.data[task.name][host.name] = { + "completed": True, + "failed": result.failed, + } def subtask_instance_completed(self, task: Task, host: Host, result: MultiResult) -> None: """Processor for Logging on SubTask Completed.""" From e194adac60dd28c8520b5a13b1a94771200267a6 Mon Sep 17 00:00:00 2001 From: Jeff Kala Date: Thu, 15 Feb 2024 14:50:39 -0600 Subject: [PATCH 063/225] fix result dict on fail --- nautobot_device_onboarding/jobs.py | 26 
+++++++++++--------------- 1 file changed, 11 insertions(+), 15 deletions(-) diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py index c50fccc5..1778ba76 100644 --- a/nautobot_device_onboarding/jobs.py +++ b/nautobot_device_onboarding/jobs.py @@ -11,15 +11,6 @@ from nautobot.extras.choices import SecretsGroupAccessTypeChoices, SecretsGroupSecretTypeChoices from nautobot.extras.models import Role, SecretsGroup, SecretsGroupAssociation, Status, Tag from nautobot.ipam.models import Namespace -from nautobot_plugin_nornir.constants import NORNIR_SETTINGS -from nautobot_plugin_nornir.plugins.inventory.nautobot_orm import NautobotORMInventory -from nautobot_ssot.jobs.base import DataSource -from nornir import InitNornir -from nornir.core.plugins.inventory import InventoryPluginRegister - -# from nornir.core.task import Result, Task -# from nornir_nautobot.exceptions import NornirNautobotException -from nornir_netmiko import netmiko_send_command # from nautobot_device_onboarding.constants import PLATFORM_COMMAND_MAP from nautobot_device_onboarding.diffsync.adapters.network_importer_adapters import ( @@ -44,6 +35,15 @@ ) from nautobot_device_onboarding.utils.helper import get_job_filter from nautobot_device_onboarding.utils.inventory_creator import _set_inventory +from nautobot_plugin_nornir.constants import NORNIR_SETTINGS +from nautobot_plugin_nornir.plugins.inventory.nautobot_orm import NautobotORMInventory +from nautobot_ssot.jobs.base import DataSource +from nornir import InitNornir +from nornir.core.plugins.inventory import InventoryPluginRegister + +# from nornir.core.task import Result, Task +# from nornir_nautobot.exceptions import NornirNautobotException +from nornir_netmiko import netmiko_send_command InventoryPluginRegister.register("nautobot-inventory", NautobotORMInventory) InventoryPluginRegister.register("empty-inventory", EmptyInventory) @@ -508,17 +508,13 @@ class Meta: def _process_result(self, command_result, 
ip_addresses): """Process the data returned from devices.""" processed_device_data = {} - print(command_result) for ip_address in ip_addresses: if command_result.get(ip_address): processed_device_data[ip_address] = command_result[ip_address] if self.debug: self.logger.debug(f"Processed CommandGetterDO return for {ip_address}: {command_result[ip_address]}") - - if command_result[ip_address]["failed"]: - print("inside if") - processed_device_data[ip_address]["failed"] = True - + else: + processed_device_data[ip_address] = {"failed": True} return processed_device_data def run(self, *args, **kwargs): From 7389d87c297cc452152b0e9d4b03ad1b9a6317de Mon Sep 17 00:00:00 2001 From: Susan Hooks Date: Thu, 15 Feb 2024 21:19:18 +0000 Subject: [PATCH 064/225] cleanup --- .../models/network_importer_models.py | 1 - nautobot_device_onboarding/jobs.py | 22 ++++--- .../utils/diffsync_utils.py | 3 +- nautobot_device_onboarding/utils/formatter.py | 57 ------------------- 4 files changed, 11 insertions(+), 72 deletions(-) diff --git a/nautobot_device_onboarding/diffsync/models/network_importer_models.py b/nautobot_device_onboarding/diffsync/models/network_importer_models.py index 5e8d27ba..0d5b8e7e 100644 --- a/nautobot_device_onboarding/diffsync/models/network_importer_models.py +++ b/nautobot_device_onboarding/diffsync/models/network_importer_models.py @@ -1,6 +1,5 @@ """Diffsync models.""" -from dataclasses import dataclass from typing import List, Optional from diffsync import DiffSync, DiffSyncModel diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py index 1778ba76..f73c0bb8 100644 --- a/nautobot_device_onboarding/jobs.py +++ b/nautobot_device_onboarding/jobs.py @@ -11,8 +11,13 @@ from nautobot.extras.choices import SecretsGroupAccessTypeChoices, SecretsGroupSecretTypeChoices from nautobot.extras.models import Role, SecretsGroup, SecretsGroupAssociation, Status, Tag from nautobot.ipam.models import Namespace +from nautobot_plugin_nornir.constants 
import NORNIR_SETTINGS +from nautobot_plugin_nornir.plugins.inventory.nautobot_orm import NautobotORMInventory +from nautobot_ssot.jobs.base import DataSource +from nornir import InitNornir +from nornir.core.plugins.inventory import InventoryPluginRegister +from nornir_netmiko import netmiko_send_command -# from nautobot_device_onboarding.constants import PLATFORM_COMMAND_MAP from nautobot_device_onboarding.diffsync.adapters.network_importer_adapters import ( NetworkImporterNautobotAdapter, NetworkImporterNetworkAdapter, @@ -35,15 +40,6 @@ ) from nautobot_device_onboarding.utils.helper import get_job_filter from nautobot_device_onboarding.utils.inventory_creator import _set_inventory -from nautobot_plugin_nornir.constants import NORNIR_SETTINGS -from nautobot_plugin_nornir.plugins.inventory.nautobot_orm import NautobotORMInventory -from nautobot_ssot.jobs.base import DataSource -from nornir import InitNornir -from nornir.core.plugins.inventory import InventoryPluginRegister - -# from nornir.core.task import Result, Task -# from nornir_nautobot.exceptions import NornirNautobotException -from nornir_netmiko import netmiko_send_command InventoryPluginRegister.register("nautobot-inventory", NautobotORMInventory) InventoryPluginRegister.register("empty-inventory", EmptyInventory) @@ -512,7 +508,9 @@ def _process_result(self, command_result, ip_addresses): if command_result.get(ip_address): processed_device_data[ip_address] = command_result[ip_address] if self.debug: - self.logger.debug(f"Processed CommandGetterDO return for {ip_address}: {command_result[ip_address]}") + self.logger.debug( + f"Processed CommandGetterDO return for {ip_address}: {command_result[ip_address]}" + ) else: processed_device_data[ip_address] = {"failed": True} return processed_device_data @@ -617,7 +615,7 @@ def run(self, *args, **kwargs): for command in commands: command_result = nornir_obj.run(task=netmiko_send_command, command_string=command, use_textfsm=True) - # all_results = 
format_ni_data_cisco_ios(command=command,command_result=command_result) + #TODO: Move this to a formatter for host_name, result in command_result.items(): if command_result.failed: failed_results = {host_name: {"Failed": True, "subtask_result": result.result}} diff --git a/nautobot_device_onboarding/utils/diffsync_utils.py b/nautobot_device_onboarding/utils/diffsync_utils.py index afb11aca..a9193216 100644 --- a/nautobot_device_onboarding/utils/diffsync_utils.py +++ b/nautobot_device_onboarding/utils/diffsync_utils.py @@ -4,7 +4,6 @@ from django.core.exceptions import ObjectDoesNotExist, ValidationError from nautobot.apps.choices import PrefixTypeChoices -from nautobot.extras.models import Status from nautobot.ipam.models import IPAddress, Prefix @@ -35,7 +34,7 @@ def get_or_create_prefix(host, mask_length, default_status, namespace, job=None) def get_or_create_ip_address(host, mask_length, namespace, default_ip_status, default_prefix_status, job=None): """Attempt to get a Nautobot IPAddress, create a new one if necessary.""" ip_address = None - default_status = Status.objects.get(name="Active") + try: ip_address = IPAddress.objects.get( host=host, diff --git a/nautobot_device_onboarding/utils/formatter.py b/nautobot_device_onboarding/utils/formatter.py index 369fb700..70e98a4b 100644 --- a/nautobot_device_onboarding/utils/formatter.py +++ b/nautobot_device_onboarding/utils/formatter.py @@ -119,60 +119,3 @@ def normalize_tagged_interface(tagged_interface): if tagged_interface in TAGGED_INTERFACE_TYPES: return TAGGED_INTERFACE_TYPES[tagged_interface] return "" - - -def format_ni_data_cisco_ios(command, command_result): - """Format cisco_ios data.""" - all_results = {} - # command = ["show version", "show interfaces", "show vlan", "show interfaces switchport"] - for host_name, result in command_result.items(): - if host_name not in all_results: - all_results[host_name] = {"interfaces": {}, "serial": ""} - - if command == "show version": - serial_info = 
result.result[0] - serial_number = serial_info.get("serial") - all_results[host_name]["serial"] = serial_number[0] - elif command == "show interfaces": - print(f"Interfaces: {result.result}") - for interface_info in result.result: - interface_name = interface_info.get("interface") - media_type = interface_info.get("media_type") - hardware_type = interface_info.get("hardware_type") - mtu = interface_info.get("mtu") - description = interface_info.get("description") - mac_address = interface_info.get("mac_address") - link_status = interface_info.get("link_status") - - if link_status == "up": - link_status = True - else: - link_status = False - - type = "other" - if hardware_type == "EtherChannel": - type = "lag" - elif hardware_type == "Ethernet SVI": - type = "virtual" - elif media_type == "10/100/1000BaseTX": - type = "100base-tx" - else: - type = "other" - - all_results[host_name]["interfaces"][interface_name] = { - "mtu": mtu, - "type": type, - "media_type": media_type, - "hardware_type": hardware_type, - "description": description, - "mac_address": mac_address, - "enabled": link_status, - } - elif command == "show vlan": - print(f"Vlan: {result.result}") - elif command == "show interfaces switchport": - for interface_info in result.result: - print(f"Interfaces switchport: {result.result}") - interface_mode = interface_info.get("admin_mode") - access_vlan = interface_info.get("access_vlan") - return all_results From 6c7f3c3240019957f76f3db56e58169d1df28e10 Mon Sep 17 00:00:00 2001 From: David Cates Date: Thu, 15 Feb 2024 14:22:59 -0700 Subject: [PATCH 065/225] update logging and bump version --- .../diffsync/adapters/onboarding_adapters.py | 2 -- pyproject.toml | 2 +- 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py index 8ac36776..8f120d7a 100644 --- 
a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py @@ -150,8 +150,6 @@ def _handle_failed_connections(self, device_data): for ip_address in device_data: if device_data[ip_address].get("failed"): self.job.logger.error(f"Connection or data error for {ip_address}. This device will not be onboarded.") - if self.job.debug: - self.job.logger.error(device_data[ip_address].get("subtask_result")) failed_ip_addresses.append(ip_address) for ip_address in failed_ip_addresses: del device_data[ip_address] diff --git a/pyproject.toml b/pyproject.toml index 4d6dc55a..75dd2a9b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "nautobot-device-onboarding" -version = "3.0.2a1" +version = "3.0.2a2" description = "A app for Nautobot to easily onboard new devices." authors = ["Network to Code, LLC "] license = "Apache-2.0" From c90038423c00dc35b155edf4f7ee59e60110bf12 Mon Sep 17 00:00:00 2001 From: Susan Hooks Date: Thu, 15 Feb 2024 21:30:59 +0000 Subject: [PATCH 066/225] remove logging --- nautobot_device_onboarding/jobs.py | 5 ----- 1 file changed, 5 deletions(-) diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py index f73c0bb8..46d12c00 100644 --- a/nautobot_device_onboarding/jobs.py +++ b/nautobot_device_onboarding/jobs.py @@ -543,11 +543,6 @@ def run(self, *args, **kwargs): nr_with_processors.run(task=netmiko_send_commands) final_result = self._process_result(compiled_results, self.ip_addresses) - # Remove before final merge # - for host, data in nr_with_processors.inventory.hosts.items(): - self.logger.info("%s;\n%s", host, data.dict()) - # End # - except Exception as err: # pylint: disable=broad-exception-caught self.logger.error("Error: %s", err) return err From e0da10f87e047b224d03741d27e193b2c3a2a3dd Mon Sep 17 00:00:00 2001 From: David Cates Date: Thu, 15 Feb 2024 15:58:45 -0700 Subject: [PATCH 067/225] update lock 
file --- poetry.lock | 19 ++++++++++++++----- 1 file changed, 14 insertions(+), 5 deletions(-) diff --git a/poetry.lock b/poetry.lock index d57ff28f..0c0d7560 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2093,7 +2093,6 @@ typing-extensions = ">=4.3.0" [[package]] name = "nautobot" - version = "2.1.4" description = "Source of truth and network automation platform." optional = false @@ -2391,13 +2390,13 @@ nornir = ">=3,<4" [[package]] name = "ntc-templates" -version = "4.3.0" +version = "4.2.0" description = "TextFSM Templates for Network Devices, and Python wrapper for TextFSM's CliTable." optional = false python-versions = ">=3.8,<4.0" files = [ - {file = "ntc_templates-4.3.0-py3-none-any.whl", hash = "sha256:f9b4805dfd9d1516a29ae9f505409c17c7f14c958d47f1c1f57c9486af6164db"}, - {file = "ntc_templates-4.3.0.tar.gz", hash = "sha256:b6902389e86b868d76b64ea55c8225a0aa7aafe910b3a02b2a33b7b18fb27ef1"}, + {file = "ntc_templates-4.2.0-py3-none-any.whl", hash = "sha256:f41471c1375c1a86bb5958358339efe9e95d908ea33866125adafe36fbfe11dd"}, + {file = "ntc_templates-4.2.0.tar.gz", hash = "sha256:a06c0e786aa3aea429d345ea67f59cb6da43557c31aa65914969d0cd6b0c0dde"}, ] [package.dependencies] @@ -3181,7 +3180,6 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, {file = 
"PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, @@ -4000,6 +3998,17 @@ files = [ {file = "watchdog-4.0.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6e949a8a94186bced05b6508faa61b7adacc911115664ccb1923b9ad1f1ccf7b"}, {file = "watchdog-4.0.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6a4db54edea37d1058b08947c789a2354ee02972ed5d1e0dca9b0b820f4c7f92"}, {file = "watchdog-4.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d31481ccf4694a8416b681544c23bd271f5a123162ab603c7d7d2dd7dd901a07"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:8fec441f5adcf81dd240a5fe78e3d83767999771630b5ddfc5867827a34fa3d3"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:6a9c71a0b02985b4b0b6d14b875a6c86ddea2fdbebd0c9a720a806a8bbffc69f"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:557ba04c816d23ce98a06e70af6abaa0485f6d94994ec78a42b05d1c03dcbd50"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:d0f9bd1fd919134d459d8abf954f63886745f4660ef66480b9d753a7c9d40927"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:f9b2fdca47dc855516b2d66eef3c39f2672cbf7e7a42e7e67ad2cbfcd6ba107d"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:73c7a935e62033bd5e8f0da33a4dcb763da2361921a69a5a95aaf6c93aa03a87"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:6a80d5cae8c265842c7419c560b9961561556c4361b297b4c431903f8c33b269"}, + {file = "watchdog-4.0.0-py3-none-win32.whl", hash = "sha256:8f9a542c979df62098ae9c58b19e03ad3df1c9d8c6895d96c0d51da17b243b1c"}, + {file = "watchdog-4.0.0-py3-none-win_amd64.whl", hash = 
"sha256:f970663fa4f7e80401a7b0cbeec00fa801bf0287d93d48368fc3e6fa32716245"}, + {file = "watchdog-4.0.0-py3-none-win_ia64.whl", hash = "sha256:9a03e16e55465177d416699331b0f3564138f1807ecc5f2de9d55d8f188d08c7"}, + {file = "watchdog-4.0.0.tar.gz", hash = "sha256:e3e7065cbdabe6183ab82199d7a4f6b3ba0a438c5a512a68559846ccb76a78ec"}, ] [package.extras] From 2558e7304e275eded9ef02b9e9f48f77e80975a2 Mon Sep 17 00:00:00 2001 From: David Cates Date: Thu, 15 Feb 2024 16:03:37 -0700 Subject: [PATCH 068/225] black --- nautobot_device_onboarding/jobs.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py index 46d12c00..25c6e7af 100644 --- a/nautobot_device_onboarding/jobs.py +++ b/nautobot_device_onboarding/jobs.py @@ -610,7 +610,7 @@ def run(self, *args, **kwargs): for command in commands: command_result = nornir_obj.run(task=netmiko_send_command, command_string=command, use_textfsm=True) - #TODO: Move this to a formatter + # TODO: Move this to a formatter for host_name, result in command_result.items(): if command_result.failed: failed_results = {host_name: {"Failed": True, "subtask_result": result.result}} From ff8a8a53ae7f13e842b0e21809266215d69e59a9 Mon Sep 17 00:00:00 2001 From: Jeff Kala Date: Thu, 15 Feb 2024 17:16:39 -0600 Subject: [PATCH 069/225] getting everything reformatted still needs cleanup --- .../command_mappers/arista_eos.yml | 10 +- .../command_mappers/cisco_nxos.yml | 10 +- nautobot_device_onboarding/constants.py | 70 ++-- nautobot_device_onboarding/jobs.py | 33 +- .../nornir_plays/command_getter.py | 11 +- .../nornir_plays/processor.py | 224 +++++------ nautobot_device_onboarding/utils/formatter.py | 358 +++++++++--------- .../utils/inventory_creator.py | 13 +- 8 files changed, 351 insertions(+), 378 deletions(-) diff --git a/nautobot_device_onboarding/command_mappers/arista_eos.yml b/nautobot_device_onboarding/command_mappers/arista_eos.yml index 6b90f851..6df0be4b 100755 
--- a/nautobot_device_onboarding/command_mappers/arista_eos.yml +++ b/nautobot_device_onboarding/command_mappers/arista_eos.yml @@ -5,13 +5,13 @@ device_onboarding: command: "show version" serial: command: "show version" - jpath: "[*].serial[0]" + jpath: "[*].serial" device_type: - command: "show inventory" - jpath: "[?name=='Chassis'].pid" + command: "show version" + jpath: "[*].platform" mgmt_interface: - command: "show interfaces" + command: "show interface" jpath: "[?ip_address=='{{ host_info }}'].interface" mask_length: - command: "show interfaces" + command: "show interface" jpath: "[?ip_address=='{{ host_info }}'].prefix_length" diff --git a/nautobot_device_onboarding/command_mappers/cisco_nxos.yml b/nautobot_device_onboarding/command_mappers/cisco_nxos.yml index 6b90f851..6df0be4b 100755 --- a/nautobot_device_onboarding/command_mappers/cisco_nxos.yml +++ b/nautobot_device_onboarding/command_mappers/cisco_nxos.yml @@ -5,13 +5,13 @@ device_onboarding: command: "show version" serial: command: "show version" - jpath: "[*].serial[0]" + jpath: "[*].serial" device_type: - command: "show inventory" - jpath: "[?name=='Chassis'].pid" + command: "show version" + jpath: "[*].platform" mgmt_interface: - command: "show interfaces" + command: "show interface" jpath: "[?ip_address=='{{ host_info }}'].interface" mask_length: - command: "show interfaces" + command: "show interface" jpath: "[?ip_address=='{{ host_info }}'].prefix_length" diff --git a/nautobot_device_onboarding/constants.py b/nautobot_device_onboarding/constants.py index c9889ee9..c44412f6 100644 --- a/nautobot_device_onboarding/constants.py +++ b/nautobot_device_onboarding/constants.py @@ -9,41 +9,41 @@ "cisco_xr": "iosxr", } -PLATFORM_COMMAND_MAP = { - "cisco_ios": ["show version", "show inventory", "show interfaces"], - "cisco_nxos": ["show version", "show inventory", "show interface"], - "cisco_xe": ["show version", "show inventory", "show interfaces"], - "juniper_junos": ["show version", "show 
interfaces", "show chassis hardware"], -} +# PLATFORM_COMMAND_MAP = { +# "cisco_ios": ["show version", "show inventory", "show interfaces"], +# "cisco_nxos": ["show version", "show inventory", "show interface"], +# "cisco_xe": ["show version", "show inventory", "show interfaces"], +# "juniper_junos": ["show version", "show interfaces", "show chassis hardware"], +# } -CISCO_INTERFACE_ABBREVIATIONS = { - "Fa": "FastEthernet", - "Gi": "GigabitEthernet", - "Te": "TenGigabitEthernet", - "Twe": "TwentyFiveGigE", - "Fo": "FortyGigabitEthernet", - "Ap": "AppGigabitEthernet", - "Lo": "Loopback", - "Po": "Port-channel", - "BE": "Bundle-Ether", - "Vl": "Vlan", - "Tu": "Tunnel", -} +# CISCO_INTERFACE_ABBREVIATIONS = { +# "Fa": "FastEthernet", +# "Gi": "GigabitEthernet", +# "Te": "TenGigabitEthernet", +# "Twe": "TwentyFiveGigE", +# "Fo": "FortyGigabitEthernet", +# "Ap": "AppGigabitEthernet", +# "Lo": "Loopback", +# "Po": "Port-channel", +# "BE": "Bundle-Ether", +# "Vl": "Vlan", +# "Tu": "Tunnel", +# } -CISCO_TO_NAUTOBOT_INTERFACE_TYPE = { - "Fast Ethernet": "100base-tx", - "EtherChannel": "lag", - "Gigabit Ethernet": "1000base-tx", - "Ten Gigabit Ethernet": "10gbase-t", - "Twenty Five Gigabit Ethernet": "25gbase-t", - "Forty Gigabit Ethernet": "40gbase-t", - "AppGigabitEthernet": "40gbase-t", - "Port-channel": "lag", - "Ethernet SVI": "virtual", -} +# CISCO_TO_NAUTOBOT_INTERFACE_TYPE = { +# "Fast Ethernet": "100base-tx", +# "EtherChannel": "lag", +# "Gigabit Ethernet": "1000base-tx", +# "Ten Gigabit Ethernet": "10gbase-t", +# "Twenty Five Gigabit Ethernet": "25gbase-t", +# "Forty Gigabit Ethernet": "40gbase-t", +# "AppGigabitEthernet": "40gbase-t", +# "Port-channel": "lag", +# "Ethernet SVI": "virtual", +# } -TAGGED_INTERFACE_TYPES = { - "static access": "access", - "dynamic auto": "trunk-all", - "trunk": "trunk", -} +# TAGGED_INTERFACE_TYPES = { +# "static access": "access", +# "dynamic auto": "trunk-all", +# "trunk": "trunk", +# } diff --git 
a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py index d1031922..17769134 100755 --- a/nautobot_device_onboarding/jobs.py +++ b/nautobot_device_onboarding/jobs.py @@ -11,9 +11,6 @@ from nautobot.extras.choices import SecretsGroupAccessTypeChoices, SecretsGroupSecretTypeChoices from nautobot.extras.models import Role, SecretsGroup, SecretsGroupAssociation, Status, Tag from nautobot.ipam.models import Namespace - -# from nautobot_device_onboarding.constants import PLATFORM_COMMAND_MAP -from nautobot_device_onboarding.constants import PLATFORM_COMMAND_MAP from nautobot_device_onboarding.diffsync.adapters.network_importer_adapters import ( NetworkImporterNautobotAdapter, NetworkImporterNetworkAdapter, @@ -28,12 +25,13 @@ from nautobot_device_onboarding.nornir_plays.command_getter import netmiko_send_commands from nautobot_device_onboarding.nornir_plays.empty_inventory import EmptyInventory from nautobot_device_onboarding.nornir_plays.logger import NornirLogger -from nautobot_device_onboarding.nornir_plays.processor import ProcessorDO, ProcessorDONew -from nautobot_device_onboarding.utils.formatter import ( - normalize_interface_name, - normalize_interface_type, - normalize_tagged_interface, -) +from nautobot_device_onboarding.nornir_plays.processor import ProcessorDONew + +# from nautobot_device_onboarding.utils.formatter import ( +# normalize_interface_name, +# normalize_interface_type, +# normalize_tagged_interface, +# ) from nautobot_device_onboarding.utils.helper import get_job_filter from nautobot_device_onboarding.utils.inventory_creator import _set_inventory from nautobot_plugin_nornir.constants import NORNIR_SETTINGS @@ -41,11 +39,6 @@ from nautobot_ssot.jobs.base import DataSource from nornir import InitNornir from nornir.core.plugins.inventory import InventoryPluginRegister -from nornir.core.task import Result, Task -from nornir_nautobot.exceptions import NornirNautobotException - -# from nornir.core.task import Result, Task -# from 
nornir_nautobot.exceptions import NornirNautobotException from nornir_netmiko import netmiko_send_command InventoryPluginRegister.register("nautobot-inventory", NautobotORMInventory) @@ -54,7 +47,7 @@ PLUGIN_SETTINGS = settings.PLUGINS_CONFIG["nautobot_device_onboarding"] LOGGER = logging.getLogger(__name__) -COMMANDS = [] +# COMMANDS = [] name = "Device Onboarding/Network Importer" # pylint: disable=invalid-name @@ -508,15 +501,6 @@ class Meta: secrets_group = ObjectVar(model=SecretsGroup) platform = ObjectVar(model=Platform, required=False) - # def _process_result(self, command_result, ip_addresses): - # """Process the data returned from devices.""" - # processed_device_data = {} - # for ip_address in ip_addresses: - # processed_device_data[ip_address] = command_result[ip_address] - # if self.debug: - # self.logger.debug(f"Processed CommandGetterDO return for {ip_address}: {command_result[ip_address]}") - # return processed_device_data - def run(self, *args, **kwargs): """Process onboarding task from ssot-ni job.""" self.ip_addresses = kwargs["ip_addresses"].replace(" ", "").split(",") @@ -543,7 +527,6 @@ def run(self, *args, **kwargs): ) nr_with_processors.inventory.hosts.update(single_host_inventory_constructed) nr_with_processors.run(task=netmiko_send_commands) - # final_result = self._process_result(compiled_results, self.ip_addresses) except Exception as err: # pylint: disable=broad-exception-caught self.logger.info("Error: %s", err) return err diff --git a/nautobot_device_onboarding/nornir_plays/command_getter.py b/nautobot_device_onboarding/nornir_plays/command_getter.py index 6d8a547e..9a999ebb 100755 --- a/nautobot_device_onboarding/nornir_plays/command_getter.py +++ b/nautobot_device_onboarding/nornir_plays/command_getter.py @@ -3,10 +3,17 @@ from nornir.core.task import Task from nornir_netmiko.tasks import netmiko_send_command -from nautobot_device_onboarding.constants import PLATFORM_COMMAND_MAP + +def _get_commands_to_run(yaml_parsed_info): + 
"""Load yaml file and look up all commands that need to be run.""" + commands = [] + for _, value in yaml_parsed_info['device_onboarding'].items(): + commands.append(value['command']) + return list(set(commands)) def netmiko_send_commands(task: Task): """Run commands specified in PLATFORM_COMMAND_MAP.""" - for command in PLATFORM_COMMAND_MAP.get(task.host.platform): + commands = _get_commands_to_run(task.host.data["platform_parsing_info"]) + for command in commands: task.run(task=netmiko_send_command, name=command, command_string=command, use_textfsm=True) diff --git a/nautobot_device_onboarding/nornir_plays/processor.py b/nautobot_device_onboarding/nornir_plays/processor.py index f20def9d..5054512b 100755 --- a/nautobot_device_onboarding/nornir_plays/processor.py +++ b/nautobot_device_onboarding/nornir_plays/processor.py @@ -2,109 +2,105 @@ from typing import Dict -from nautobot_device_onboarding.utils.formatter import ( +from nautobot_device_onboarding.utils.formatter import ( # format_ob_data_ios,; format_ob_data_junos,; format_ob_data_nxos, extract_show_data, - format_ob_data_ios, - format_ob_data_junos, - format_ob_data_nxos, ) from nornir.core.inventory import Host from nornir.core.task import AggregatedResult, MultiResult, Task from nornir_nautobot.exceptions import NornirNautobotException from nornir_nautobot.plugins.processors import BaseLoggingProcessor - -class ProcessorDO(BaseLoggingProcessor): - """Processor class for Device Onboarding jobs.""" - - def __init__(self, logger, command_outputs): - """Set logging facility.""" - self.logger = logger - self.data: Dict = command_outputs - - def task_started(self, task: Task) -> None: - """Boilerplate Nornir processor for task_started.""" - self.data[task.name] = {} - # self.data[task.name]["started"] = True - self.logger.info(f"Task Name: {task.name} started") - - def task_completed(self, task: Task, result: AggregatedResult) -> None: - """Boilerplate Nornir processor for task_instance_completed.""" - # 
self.data[task.name]["completed"] = True - self.logger.info(f"Task Name: {task.name} completed") - - def task_instance_started(self, task: Task, host: Host) -> None: - """Processor for Logging on Task Start.""" - self.logger.info(f"Starting {task.name}.", extra={"object": task.host}) - self.data[task.name][host.name] = {} - - def task_instance_completed(self, task: Task, host: Host, result: MultiResult) -> None: - """Nornir processor task completion for OS upgrades. - - Args: - task (Task): Nornir task individual object - host (Host): Host object with Nornir - result (MultiResult): Result from Nornir task - - Returns: - None - """ - # Complex logic to see if the task exception is expected, which is depicted by - # a sub task raising a NornirNautobotException. - if result.failed: - for level_1_result in result: - if hasattr(level_1_result, "exception") and hasattr(level_1_result.exception, "result"): - for level_2_result in level_1_result.exception.result: # type: ignore - if isinstance(level_2_result.exception, NornirNautobotException): - return - self.logger.critical(f"{task.name} failed: {result.exception}", extra={"object": task.host}) - else: - self.logger.info(f"Task Name: {task.name} Task Result: {result.result}", extra={"object": task.host}) - - # self.data[task.name][host.name] = { - # "completed": True, - # "failed": result.failed, - # } - - def subtask_instance_completed(self, task: Task, host: Host, result: MultiResult) -> None: - """Processor for Logging on SubTask Completed.""" - self.logger.info(f"Subtask completed {task.name}.", extra={"object": task.host}) - self.logger.info(f"Subtask result {result.result}.", extra={"object": task.host}) - - self.data[task.name][host.name] = { - "failed": result.failed, - "subtask_result": result.result, - } - - if self.data[task.name][host.name].get("failed"): - self.data[host.name] = { - "failed": True, - "subtask_result": result.result, - } - elif host.name not in self.data: - self.data[host.name] = { - 
"platform": host.platform, - "manufacturer": host.platform.split("_")[0].title() if host.platform else "PLACEHOLDER", - "network_driver": host.platform, - } - - if host.platform in ["cisco_ios", "cisco_xe"]: - formatted_data = format_ob_data_ios(host, result) - elif host.platform == "cisco_nxos": - formatted_data = format_ob_data_nxos(host, result) - elif host.platform == "juniper_junos": - formatted_data = format_ob_data_junos(host, result) - else: - formatted_data = {} - self.logger.info(f"No formatter for platform: {host.platform}.", extra={"object": task.host}) - - self.data[host.name].update(formatted_data) - - def subtask_instance_started(self, task: Task, host: Host) -> None: - """Processor for Logging on SubTask Start.""" - self.logger.info(f"Subtask starting {task.name}.", extra={"object": task.host}) - self.data[task.name] = {} - # self.data[task.name][host.name] = {"started": True} +# class ProcessorDO(BaseLoggingProcessor): +# """Processor class for Device Onboarding jobs.""" + +# def __init__(self, logger, command_outputs): +# """Set logging facility.""" +# self.logger = logger +# self.data: Dict = command_outputs + +# def task_started(self, task: Task) -> None: +# """Boilerplate Nornir processor for task_started.""" +# self.data[task.name] = {} +# # self.data[task.name]["started"] = True +# self.logger.info(f"Task Name: {task.name} started") + +# def task_completed(self, task: Task, result: AggregatedResult) -> None: +# """Boilerplate Nornir processor for task_instance_completed.""" +# # self.data[task.name]["completed"] = True +# self.logger.info(f"Task Name: {task.name} completed") + +# def task_instance_started(self, task: Task, host: Host) -> None: +# """Processor for Logging on Task Start.""" +# self.logger.info(f"Starting {task.name}.", extra={"object": task.host}) +# self.data[task.name][host.name] = {} + +# def task_instance_completed(self, task: Task, host: Host, result: MultiResult) -> None: +# """Nornir processor task completion for OS 
upgrades. + +# Args: +# task (Task): Nornir task individual object +# host (Host): Host object with Nornir +# result (MultiResult): Result from Nornir task + +# Returns: +# None +# """ +# # Complex logic to see if the task exception is expected, which is depicted by +# # a sub task raising a NornirNautobotException. +# if result.failed: +# for level_1_result in result: +# if hasattr(level_1_result, "exception") and hasattr(level_1_result.exception, "result"): +# for level_2_result in level_1_result.exception.result: # type: ignore +# if isinstance(level_2_result.exception, NornirNautobotException): +# return +# self.logger.critical(f"{task.name} failed: {result.exception}", extra={"object": task.host}) +# else: +# self.logger.info(f"Task Name: {task.name} Task Result: {result.result}", extra={"object": task.host}) + +# # self.data[task.name][host.name] = { +# # "completed": True, +# # "failed": result.failed, +# # } + +# def subtask_instance_completed(self, task: Task, host: Host, result: MultiResult) -> None: +# """Processor for Logging on SubTask Completed.""" +# self.logger.info(f"Subtask completed {task.name}.", extra={"object": task.host}) +# self.logger.info(f"Subtask result {result.result}.", extra={"object": task.host}) + +# self.data[task.name][host.name] = { +# "failed": result.failed, +# "subtask_result": result.result, +# } + +# if self.data[task.name][host.name].get("failed"): +# self.data[host.name] = { +# "failed": True, +# "subtask_result": result.result, +# } +# elif host.name not in self.data: +# self.data[host.name] = { +# "platform": host.platform, +# "manufacturer": host.platform.split("_")[0].title() if host.platform else "PLACEHOLDER", +# "network_driver": host.platform, +# } + +# if host.platform in ["cisco_ios", "cisco_xe"]: +# formatted_data = format_ob_data_ios(host, result) +# elif host.platform == "cisco_nxos": +# formatted_data = format_ob_data_nxos(host, result) +# elif host.platform == "juniper_junos": +# formatted_data = 
format_ob_data_junos(host, result) +# else: +# formatted_data = {} +# self.logger.info(f"No formatter for platform: {host.platform}.", extra={"object": task.host}) + +# self.data[host.name].update(formatted_data) + +# def subtask_instance_started(self, task: Task, host: Host) -> None: +# """Processor for Logging on SubTask Start.""" +# self.logger.info(f"Subtask starting {task.name}.", extra={"object": task.host}) +# self.data[task.name] = {} +# # self.data[task.name][host.name] = {"started": True} class ProcessorDONew(BaseLoggingProcessor): @@ -115,23 +111,6 @@ def __init__(self, logger, command_outputs): self.logger = logger self.data: Dict = command_outputs - # def task_started(self, task: Task) -> None: - # """Boilerplate Nornir processor for task_started.""" - # self.data[task.name] = {} - # # self.data[task.name]["started"] = True - # self.logger.info(f"Task Name: {task.name} started") - - # def task_completed(self, task: Task, result: AggregatedResult) -> None: - # """Boilerplate Nornir processor for task_instance_completed.""" - # # self.data[task.name]["completed"] = True - # self.logger.info(f"Task Name: {task.name} completed") - - # def task_instance_started(self, task: Task, host: Host) -> None: - # """Processor for Logging on Task Start.""" - # self.logger.info(f"Starting {task.name}.", extra={"object": task.host}) - # self.data[host.name] = {task.name: ""} - # # self.data[task.name][host.name] = {} - def task_instance_completed(self, task: Task, host: Host, result: MultiResult) -> None: """Nornir processor task completion for OS upgrades. 
@@ -155,11 +134,6 @@ def task_instance_completed(self, task: Task, host: Host, result: MultiResult) - else: self.logger.info(f"task_instance_completed Task Name: {task.name} Task Result: {result.result}", extra={"object": task.host}) - # self.data[host.name][task.name] = { - # "completed": True, - # "failed": result.failed, - # } - def subtask_instance_completed(self, task: Task, host: Host, result: MultiResult) -> None: """Processor for Logging on SubTask Completed.""" self.logger.info(f"subtask_instance_completed Subtask completed {task.name}.", extra={"object": task.host}) @@ -167,20 +141,18 @@ def subtask_instance_completed(self, task: Task, host: Host, result: MultiResult self.data[host.name].update({ "failed": result.failed, - # "subtask_result": result.result, }) formatted_data = extract_show_data(host, result) print(f"current formated data = {formatted_data}") for k, v in formatted_data.items(): self.data[host.name][k] = v - # self.data[host.name].update(formatted_data) def subtask_instance_started(self, task: Task, host: Host) -> None: # show command start """Processor for Logging on SubTask Start.""" self.logger.info(f"subtask_instance_started Subtask starting {task.name}.", extra={"object": task.host}) - self.data[host.name] = { - "platform": host.platform, - "manufacturer": host.platform.split("_")[0].title() if host.platform else "PLACEHOLDER", - "network_driver": host.platform, - } - # self.data[host.name].update({task.name:{}}) + if not self.data.get(host.name): + self.data[host.name] = { + "platform": host.platform, + "manufacturer": host.platform.split("_")[0].title() if host.platform else "PLACEHOLDER", + "network_driver": host.platform, + } diff --git a/nautobot_device_onboarding/utils/formatter.py b/nautobot_device_onboarding/utils/formatter.py index 747a8800..8e00699c 100755 --- a/nautobot_device_onboarding/utils/formatter.py +++ b/nautobot_device_onboarding/utils/formatter.py @@ -28,7 +28,6 @@ def load_yaml_datafile(filename, config=None): 
jinja_env.filters = engines["jinja"].env.filters template = jinja_env.get_template(filename) populated = template.render(config) - print(populated) return yaml.safe_load(populated) @@ -44,7 +43,8 @@ def extract_show_data(host, multi_result): host_platform = host.platform if host_platform == "cisco_xe": host_platform = "cisco_ios" - command_jpaths = load_yaml_datafile(f"{host_platform}.yml", config={"host_info": host}) + command_jpaths = host.data["platform_parsing_info"] + result_dict = {} for default_dict_field, command_info in command_jpaths['device_onboarding'].items(): if command_info["command"] == multi_result[0].name: @@ -54,180 +54,180 @@ def extract_show_data(host, multi_result): result_dict[default_dict_field] = extracted_value return result_dict -from nautobot_device_onboarding.constants import ( - CISCO_INTERFACE_ABBREVIATIONS, - CISCO_TO_NAUTOBOT_INTERFACE_TYPE, - TAGGED_INTERFACE_TYPES, -) - - -def format_ob_data_ios(host, result): - """Format the data for onboarding IOS devices.""" - primary_ip4 = host.name - formatted_data = {} - - for r in result: - if r.name == "show inventory": - device_type = r.result[0].get("pid") - formatted_data["device_type"] = device_type - elif r.name == "show version": - hostname = r.result[0].get("hostname") - serial = r.result[0].get("serial") - formatted_data["hostname"] = hostname - formatted_data["serial"] = serial[0] - elif r.name == "show interfaces": - show_interfaces = r.result - for interface in show_interfaces: - if interface.get("ip_address") == primary_ip4: - mask_length = interface.get("prefix_length") - interface_name = interface.get("interface") - formatted_data["mask_length"] = mask_length - formatted_data["mgmt_interface"] = interface_name - - return formatted_data - - -def format_ob_data_nxos(host, result): - """Format the data for onboarding NXOS devices.""" - primary_ip4 = host.name - formatted_data = {} - - for r in result: - if r.name == "show inventory": - # TODO: Add check for PID when textfsm 
template is fixed - pass - elif r.name == "show version": - device_type = r.result[0].get("platform") - formatted_data["device_type"] = device_type - hostname = r.result[0].get("hostname") - serial = r.result[0].get("serial") - formatted_data["hostname"] = hostname - if serial: - formatted_data["serial"] = serial - else: - formatted_data["serial"] = "" - elif r.name == "show interface": - show_interfaces = r.result - print(f"show interfaces {show_interfaces}") - for interface in show_interfaces: - if interface.get("ip_address") == primary_ip4: - mask_length = interface.get("prefix_length") - interface_name = interface.get("interface") - formatted_data["mask_length"] = mask_length - formatted_data["mgmt_interface"] = interface_name - break - return formatted_data - - return formatted_data - -def format_ob_data_junos(host, result): - """Format the data for onboarding Juniper JUNOS devices.""" - primary_ip4 = host.name - formatted_data = {} - - for r in result: - if r.name == "show version": - device_type = r.result[0].get("model") - formatted_data["device_type"] = device_type - hostname = r.result[0].get("hostname") - serial = "USASR24490" - # serial = r.result[0].get("serial") - formatted_data["hostname"] = hostname - if serial: - formatted_data["serial"] = serial - else: - formatted_data["serial"] = "" - elif r.name == "show interfaces": - show_interfaces = r.result - print(f"show interfaces {show_interfaces}") - for interface in show_interfaces: - if interface.get("local") == primary_ip4: - print(interface.get("destination")) - mask_length = interface.get("destination").split("/")[1] - print(f"interface mask {mask_length}") - interface_name = interface.get("interface") - formatted_data["mask_length"] = mask_length - formatted_data["mgmt_interface"] = interface_name - break - - return formatted_data - - -def normalize_interface_name(interface_name): - """Normalize interface names.""" - for interface_abbreviation, interface_full in 
CISCO_INTERFACE_ABBREVIATIONS.items(): - if interface_name.startswith(interface_abbreviation): - interface_name = interface_name.replace(interface_abbreviation, interface_full, 1) - break - return interface_name - - -def normalize_interface_type(interface_type): - """Normalize interface types.""" - if interface_type in CISCO_TO_NAUTOBOT_INTERFACE_TYPE: - return CISCO_TO_NAUTOBOT_INTERFACE_TYPE[interface_type] - return "other" - - -def normalize_tagged_interface(tagged_interface): - """Normalize tagged interface types.""" - if tagged_interface in TAGGED_INTERFACE_TYPES: - return TAGGED_INTERFACE_TYPES[tagged_interface] - return "" - - -def format_ni_data_cisco_ios(command, command_result): - """Format cisco_ios data.""" - all_results = {} - # command = ["show version", "show interfaces", "show vlan", "show interfaces switchport"] - for host_name, result in command_result.items(): - if host_name not in all_results: - all_results[host_name] = {"interfaces": {}, "serial": ""} - - if command == "show version": - serial_info = result.result[0] - serial_number = serial_info.get("serial") - all_results[host_name]["serial"] = serial_number[0] - elif command == "show interfaces": - print(f"Interfaces: {result.result}") - for interface_info in result.result: - interface_name = interface_info.get("interface") - media_type = interface_info.get("media_type") - hardware_type = interface_info.get("hardware_type") - mtu = interface_info.get("mtu") - description = interface_info.get("description") - mac_address = interface_info.get("mac_address") - link_status = interface_info.get("link_status") - - if link_status == "up": - link_status = True - else: - link_status = False - - type = "other" - if hardware_type == "EtherChannel": - type = "lag" - elif hardware_type == "Ethernet SVI": - type = "virtual" - elif media_type == "10/100/1000BaseTX": - type = "100base-tx" - else: - type = "other" - - all_results[host_name]["interfaces"][interface_name] = { - "mtu": mtu, - "type": type, - 
"media_type": media_type, - "hardware_type": hardware_type, - "description": description, - "mac_address": mac_address, - "enabled": link_status, - } - elif command == "show vlan": - print(f"Vlan: {result.result}") - elif command == "show interfaces switchport": - for interface_info in result.result: - print(f"Interfaces switchport: {result.result}") - interface_mode = interface_info.get("admin_mode") - access_vlan = interface_info.get("access_vlan") - return all_results +# from nautobot_device_onboarding.constants import ( +# CISCO_INTERFACE_ABBREVIATIONS, +# CISCO_TO_NAUTOBOT_INTERFACE_TYPE, +# TAGGED_INTERFACE_TYPES, +# ) + + +# def format_ob_data_ios(host, result): +# """Format the data for onboarding IOS devices.""" +# primary_ip4 = host.name +# formatted_data = {} + +# for r in result: +# if r.name == "show inventory": +# device_type = r.result[0].get("pid") +# formatted_data["device_type"] = device_type +# elif r.name == "show version": +# hostname = r.result[0].get("hostname") +# serial = r.result[0].get("serial") +# formatted_data["hostname"] = hostname +# formatted_data["serial"] = serial[0] +# elif r.name == "show interfaces": +# show_interfaces = r.result +# for interface in show_interfaces: +# if interface.get("ip_address") == primary_ip4: +# mask_length = interface.get("prefix_length") +# interface_name = interface.get("interface") +# formatted_data["mask_length"] = mask_length +# formatted_data["mgmt_interface"] = interface_name + +# return formatted_data + + +# def format_ob_data_nxos(host, result): +# """Format the data for onboarding NXOS devices.""" +# primary_ip4 = host.name +# formatted_data = {} + +# for r in result: +# if r.name == "show inventory": +# # TODO: Add check for PID when textfsm template is fixed +# pass +# elif r.name == "show version": +# device_type = r.result[0].get("platform") +# formatted_data["device_type"] = device_type +# hostname = r.result[0].get("hostname") +# serial = r.result[0].get("serial") +# 
formatted_data["hostname"] = hostname +# if serial: +# formatted_data["serial"] = serial +# else: +# formatted_data["serial"] = "" +# elif r.name == "show interface": +# show_interfaces = r.result +# print(f"show interfaces {show_interfaces}") +# for interface in show_interfaces: +# if interface.get("ip_address") == primary_ip4: +# mask_length = interface.get("prefix_length") +# interface_name = interface.get("interface") +# formatted_data["mask_length"] = mask_length +# formatted_data["mgmt_interface"] = interface_name +# break +# return formatted_data + +# return formatted_data + +# def format_ob_data_junos(host, result): +# """Format the data for onboarding Juniper JUNOS devices.""" +# primary_ip4 = host.name +# formatted_data = {} + +# for r in result: +# if r.name == "show version": +# device_type = r.result[0].get("model") +# formatted_data["device_type"] = device_type +# hostname = r.result[0].get("hostname") +# serial = "USASR24490" +# # serial = r.result[0].get("serial") +# formatted_data["hostname"] = hostname +# if serial: +# formatted_data["serial"] = serial +# else: +# formatted_data["serial"] = "" +# elif r.name == "show interfaces": +# show_interfaces = r.result +# print(f"show interfaces {show_interfaces}") +# for interface in show_interfaces: +# if interface.get("local") == primary_ip4: +# print(interface.get("destination")) +# mask_length = interface.get("destination").split("/")[1] +# print(f"interface mask {mask_length}") +# interface_name = interface.get("interface") +# formatted_data["mask_length"] = mask_length +# formatted_data["mgmt_interface"] = interface_name +# break + +# return formatted_data + + +# def normalize_interface_name(interface_name): +# """Normalize interface names.""" +# for interface_abbreviation, interface_full in CISCO_INTERFACE_ABBREVIATIONS.items(): +# if interface_name.startswith(interface_abbreviation): +# interface_name = interface_name.replace(interface_abbreviation, interface_full, 1) +# break +# return 
interface_name + + +# def normalize_interface_type(interface_type): +# """Normalize interface types.""" +# if interface_type in CISCO_TO_NAUTOBOT_INTERFACE_TYPE: +# return CISCO_TO_NAUTOBOT_INTERFACE_TYPE[interface_type] +# return "other" + + +# def normalize_tagged_interface(tagged_interface): +# """Normalize tagged interface types.""" +# if tagged_interface in TAGGED_INTERFACE_TYPES: +# return TAGGED_INTERFACE_TYPES[tagged_interface] +# return "" + + +# def format_ni_data_cisco_ios(command, command_result): +# """Format cisco_ios data.""" +# all_results = {} +# # command = ["show version", "show interfaces", "show vlan", "show interfaces switchport"] +# for host_name, result in command_result.items(): +# if host_name not in all_results: +# all_results[host_name] = {"interfaces": {}, "serial": ""} + +# if command == "show version": +# serial_info = result.result[0] +# serial_number = serial_info.get("serial") +# all_results[host_name]["serial"] = serial_number[0] +# elif command == "show interfaces": +# print(f"Interfaces: {result.result}") +# for interface_info in result.result: +# interface_name = interface_info.get("interface") +# media_type = interface_info.get("media_type") +# hardware_type = interface_info.get("hardware_type") +# mtu = interface_info.get("mtu") +# description = interface_info.get("description") +# mac_address = interface_info.get("mac_address") +# link_status = interface_info.get("link_status") + +# if link_status == "up": +# link_status = True +# else: +# link_status = False + +# type = "other" +# if hardware_type == "EtherChannel": +# type = "lag" +# elif hardware_type == "Ethernet SVI": +# type = "virtual" +# elif media_type == "10/100/1000BaseTX": +# type = "100base-tx" +# else: +# type = "other" + +# all_results[host_name]["interfaces"][interface_name] = { +# "mtu": mtu, +# "type": type, +# "media_type": media_type, +# "hardware_type": hardware_type, +# "description": description, +# "mac_address": mac_address, +# "enabled": 
link_status, +# } +# elif command == "show vlan": +# print(f"Vlan: {result.result}") +# elif command == "show interfaces switchport": +# for interface_info in result.result: +# print(f"Interfaces switchport: {result.result}") +# interface_mode = interface_info.get("admin_mode") +# access_vlan = interface_info.get("access_vlan") +# return all_results diff --git a/nautobot_device_onboarding/utils/inventory_creator.py b/nautobot_device_onboarding/utils/inventory_creator.py index 7f821c5f..2f2b080e 100755 --- a/nautobot_device_onboarding/utils/inventory_creator.py +++ b/nautobot_device_onboarding/utils/inventory_creator.py @@ -2,10 +2,18 @@ from django.conf import settings from nautobot.extras.choices import SecretsGroupAccessTypeChoices, SecretsGroupSecretTypeChoices +from nautobot_device_onboarding.exceptions import OnboardException +from nautobot_device_onboarding.utils.formatter import load_yaml_datafile from netmiko import SSHDetect from nornir.core.inventory import ConnectionOptions, Host -from nautobot_device_onboarding.exceptions import OnboardException + +def _get_platform_parsing_info(host_platform, data): + """Open and load yaml file.""" + if host_platform == "cisco_xe": + host_platform = "cisco_ios" + yaml_parsing_info = load_yaml_datafile(f"{host_platform}.yml", config=data) + return yaml_parsing_info def _parse_credentials(credentials): @@ -69,6 +77,8 @@ def _set_inventory(host_ip, platform, port, secrets_group): else: platform = guess_netmiko_device_type(host_ip, username, password, port) + parsing_info = _get_platform_parsing_info(platform, data={"host_info": host_ip}) + host = Host( name=host_ip, hostname=host_ip, @@ -85,6 +95,7 @@ def _set_inventory(host_ip, platform, port, secrets_group): platform=platform, ) }, + data={"platform_parsing_info": parsing_info} ) inv.update({host_ip: host}) From 40935cd28b3600127695049761b14087aa65974f Mon Sep 17 00:00:00 2001 From: Jeff Kala Date: Fri, 16 Feb 2024 12:28:59 -0600 Subject: [PATCH 070/225] new framework 
for parsing outputs --- .../command_mappers/arista_eos.yml | 15 +- .../command_mappers/cisco_ios.yml | 89 ++++++++ .../command_mappers/cisco_nxos.yml | 1 + .../command_mappers/juniper_junos.yml | 1 + nautobot_device_onboarding/constants.py | 39 ---- .../adapters/network_importer_adapters.py | 4 +- .../diffsync/adapters/onboarding_adapters.py | 2 +- .../models/network_importer_models.py | 3 +- .../diffsync/models/onboarding_models.py | 2 +- nautobot_device_onboarding/jobs.py | 121 ++--------- .../nornir_plays/command_getter.py | 8 +- .../nornir_plays/processor.py | 122 ++--------- .../utils/diffsync_utils.py | 3 +- nautobot_device_onboarding/utils/formatter.py | 202 ++---------------- .../utils/inventory_creator.py | 7 +- poetry.lock | 9 +- pyproject.toml | 1 + 17 files changed, 167 insertions(+), 462 deletions(-) diff --git a/nautobot_device_onboarding/command_mappers/arista_eos.yml b/nautobot_device_onboarding/command_mappers/arista_eos.yml index 6df0be4b..fb2f75d0 100755 --- a/nautobot_device_onboarding/command_mappers/arista_eos.yml +++ b/nautobot_device_onboarding/command_mappers/arista_eos.yml @@ -1,17 +1,18 @@ --- device_onboarding: + use_textfsm: true hostname: jpath: "[*].hostname" - command: "show version" + command: "show hostname" serial: command: "show version" - jpath: "[*].serial" + jpath: "[*].serial_number" device_type: command: "show version" - jpath: "[*].platform" + jpath: "[*].model" mgmt_interface: - command: "show interface" - jpath: "[?ip_address=='{{ host_info }}'].interface" + command: "show ip interface brief" + jpath: "[?interface=='Management1'].interface" mask_length: - command: "show interface" - jpath: "[?ip_address=='{{ host_info }}'].prefix_length" + command: "show ip interface brief" + jpath: "[?interface=='Management1'].ip_address" diff --git a/nautobot_device_onboarding/command_mappers/cisco_ios.yml b/nautobot_device_onboarding/command_mappers/cisco_ios.yml index 6b90f851..013ca178 100755 --- 
a/nautobot_device_onboarding/command_mappers/cisco_ios.yml +++ b/nautobot_device_onboarding/command_mappers/cisco_ios.yml @@ -1,5 +1,6 @@ --- device_onboarding: + use_textfsm: true hostname: jpath: "[*].hostname" command: "show version" @@ -15,3 +16,91 @@ device_onboarding: mask_length: command: "show interfaces" jpath: "[?ip_address=='{{ host_info }}'].prefix_length" + + + # commands = ["show version", "show interfaces", "show vlan", "show interfaces switchport"] + # all_results = {} + + # for command in commands: + # command_result = nornir_obj.run(task=netmiko_send_command, command_string=command, use_textfsm=True) + # # all_results = format_ni_data_cisco_ios(command=command,command_result=command_result) + # for host_name, result in command_result.items(): + # if command_result.failed: + # failed_results = {host_name: {"Failed": True, "subtask_result": result.result}} + # return failed_results + # if host_name not in all_results: + # all_results[host_name] = {"interfaces": {}, "serial": ""} + + # if command == "show version": + # self.logger.info(f"Show version: {result.result}") + # serial_info = result.result[0] + # self.logger.info(f"Serial Info: {serial_info}") + # serial_number = serial_info.get("serial") + # all_results[host_name]["serial"] = serial_number[0] + # elif command == "show interfaces": + # self.logger.info(f"Interfaces: {result.result}") + # for interface_info in result.result: + # self.logger.info(f"Interface Info: {interface_info}") + # interface_name = interface_info.get("interface") + # # media_type = interface_info.get("media_type") + # hardware_type = interface_info.get("hardware_type") + # mtu = interface_info.get("mtu") + # description = interface_info.get("description") + # mac_address = interface_info.get("mac_address") + # link_status = interface_info.get("link_status") + # ip_address = interface_info.get("ip_address") + # mask_length = interface_info.get("prefix_length") + + # link_status = bool(link_status == "up") + + # 
interface_type = normalize_interface_type(hardware_type) + + # all_results[host_name]["interfaces"][interface_name] = { + # "mtu": mtu, + # "type": interface_type, + # "description": description, + # "mac_address": mac_address, + # "enabled": link_status, + # "ip_addresses": [{"host": ip_address, "mask_length": mask_length}], + # } + # elif command == "show vlan": + # vlan_id_name_map = {} + # self.logger.info(f"Vlan: {result.result}") + # for vlan_info in result.result: + # self.logger.info(f"Vlan info: {vlan_info}") + # vlan_id = vlan_info.get("vlan_id") + # vlan_name = vlan_info.get("vlan_name") + # vlan_id_name_map[vlan_id] = vlan_name + # self.logger.info(f"Vlan ID Name Map: {vlan_id_name_map}") + + # elif command == "show interfaces switchport": + # self.logger.info(f"Interfaces Switchport: {result.result}") + # for interface_info in result.result: + # self.logger.info(f"Interface Info: {interface_info}") + # interface_name = normalize_interface_name(interface_info.get("interface")) + # self.logger.info(f"Interface Name: {interface_name}") + # interface_mode = normalize_tagged_interface(interface_info.get("admin_mode")) + # access_vlan = interface_info.get("access_vlan") + # tagged_vlans = interface_info.get("trunking_vlans", []) + # tagged_vlans_list = tagged_vlans[0].split(",") + # self.logger.info(f"tagged_vlans: {tagged_vlans}") + + # if interface_name in all_results[host_name]["interfaces"]: + # all_results[host_name]["interfaces"][interface_name]["mode"] = interface_mode + # all_results[host_name]["interfaces"][interface_name]["access_vlan"] = { + # "vlan_id": access_vlan, + # "vlan_name": vlan_id_name_map.get(access_vlan, ""), + # } + + # # Prepare tagged VLANs info + # tagged_vlans_info = [ + # {"vlan_id": vlan_id, "vlan_name": vlan_id_name_map.get(vlan_id, "Unknown VLAN")} + # for vlan_id in tagged_vlans_list + # if vlan_id in vlan_id_name_map + # ] + # self.logger.info(f"tagged_vlans_info: {tagged_vlans_info}") + # 
all_results[host_name]["interfaces"][interface_name][ + # "tagged_vlans" + # ] = tagged_vlans_info + # else: + # self.logger.info(f"Interface {interface_name} not found in interfaces list.") diff --git a/nautobot_device_onboarding/command_mappers/cisco_nxos.yml b/nautobot_device_onboarding/command_mappers/cisco_nxos.yml index 6df0be4b..e3e9aa1c 100755 --- a/nautobot_device_onboarding/command_mappers/cisco_nxos.yml +++ b/nautobot_device_onboarding/command_mappers/cisco_nxos.yml @@ -1,5 +1,6 @@ --- device_onboarding: + use_textfsm: true hostname: jpath: "[*].hostname" command: "show version" diff --git a/nautobot_device_onboarding/command_mappers/juniper_junos.yml b/nautobot_device_onboarding/command_mappers/juniper_junos.yml index 6b90f851..f44de5f1 100755 --- a/nautobot_device_onboarding/command_mappers/juniper_junos.yml +++ b/nautobot_device_onboarding/command_mappers/juniper_junos.yml @@ -1,5 +1,6 @@ --- device_onboarding: + use_textfsm: true hostname: jpath: "[*].hostname" command: "show version" diff --git a/nautobot_device_onboarding/constants.py b/nautobot_device_onboarding/constants.py index c44412f6..caccc432 100644 --- a/nautobot_device_onboarding/constants.py +++ b/nautobot_device_onboarding/constants.py @@ -8,42 +8,3 @@ "juniper_junos": "junos", "cisco_xr": "iosxr", } - -# PLATFORM_COMMAND_MAP = { -# "cisco_ios": ["show version", "show inventory", "show interfaces"], -# "cisco_nxos": ["show version", "show inventory", "show interface"], -# "cisco_xe": ["show version", "show inventory", "show interfaces"], -# "juniper_junos": ["show version", "show interfaces", "show chassis hardware"], -# } - -# CISCO_INTERFACE_ABBREVIATIONS = { -# "Fa": "FastEthernet", -# "Gi": "GigabitEthernet", -# "Te": "TenGigabitEthernet", -# "Twe": "TwentyFiveGigE", -# "Fo": "FortyGigabitEthernet", -# "Ap": "AppGigabitEthernet", -# "Lo": "Loopback", -# "Po": "Port-channel", -# "BE": "Bundle-Ether", -# "Vl": "Vlan", -# "Tu": "Tunnel", -# } - -# CISCO_TO_NAUTOBOT_INTERFACE_TYPE = { 
-# "Fast Ethernet": "100base-tx", -# "EtherChannel": "lag", -# "Gigabit Ethernet": "1000base-tx", -# "Ten Gigabit Ethernet": "10gbase-t", -# "Twenty Five Gigabit Ethernet": "25gbase-t", -# "Forty Gigabit Ethernet": "40gbase-t", -# "AppGigabitEthernet": "40gbase-t", -# "Port-channel": "lag", -# "Ethernet SVI": "virtual", -# } - -# TAGGED_INTERFACE_TYPES = { -# "static access": "access", -# "dynamic auto": "trunk-all", -# "trunk": "trunk", -# } diff --git a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py index 817ef323..9b87eb60 100644 --- a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py @@ -2,8 +2,6 @@ import time -import diffsync -from diffsync.enum import DiffSyncModelFlags from nautobot.apps.choices import JobResultStatusChoices from nautobot.dcim.models import Interface from nautobot.extras.models import Job, JobResult @@ -11,6 +9,8 @@ from nautobot_ssot.contrib import NautobotAdapter from netaddr import EUI, mac_unix_expanded +import diffsync +from diffsync.enum import DiffSyncModelFlags from nautobot_device_onboarding.diffsync.models import network_importer_models diff --git a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py index 8ac36776..ea36a328 100644 --- a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py @@ -2,12 +2,12 @@ import time -import diffsync import netaddr from nautobot.apps.choices import JobResultStatusChoices from nautobot.dcim.models import Device, DeviceType, Manufacturer, Platform from nautobot.extras.models import Job, JobResult +import diffsync from nautobot_device_onboarding.diffsync.models import onboarding_models diff --git 
a/nautobot_device_onboarding/diffsync/models/network_importer_models.py b/nautobot_device_onboarding/diffsync/models/network_importer_models.py index 5e8d27ba..58c1010e 100644 --- a/nautobot_device_onboarding/diffsync/models/network_importer_models.py +++ b/nautobot_device_onboarding/diffsync/models/network_importer_models.py @@ -1,9 +1,7 @@ """Diffsync models.""" -from dataclasses import dataclass from typing import List, Optional -from diffsync import DiffSync, DiffSyncModel from django.core.exceptions import MultipleObjectsReturned, ObjectDoesNotExist, ValidationError from nautobot.dcim.choices import InterfaceTypeChoices from nautobot.dcim.models import Device, Interface, Location @@ -11,6 +9,7 @@ from nautobot.ipam.models import VLAN, IPAddress, IPAddressToInterface from nautobot_ssot.contrib import NautobotModel +from diffsync import DiffSync, DiffSyncModel from nautobot_device_onboarding.utils import diffsync_utils diff --git a/nautobot_device_onboarding/diffsync/models/onboarding_models.py b/nautobot_device_onboarding/diffsync/models/onboarding_models.py index 2b8cdbb6..be7b1ed1 100644 --- a/nautobot_device_onboarding/diffsync/models/onboarding_models.py +++ b/nautobot_device_onboarding/diffsync/models/onboarding_models.py @@ -2,13 +2,13 @@ from typing import Optional -from diffsync import DiffSyncModel from django.core.exceptions import MultipleObjectsReturned, ObjectDoesNotExist, ValidationError from nautobot.apps.choices import InterfaceTypeChoices from nautobot.dcim.models import Device, DeviceType, Interface, Manufacturer, Platform from nautobot.extras.models import Role, SecretsGroup, Status from nautobot_ssot.contrib import NautobotModel +from diffsync import DiffSyncModel from nautobot_device_onboarding.utils import diffsync_utils diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py index 17769134..12059604 100755 --- a/nautobot_device_onboarding/jobs.py +++ b/nautobot_device_onboarding/jobs.py @@ -3,7 +3,6 @@ import 
logging -from diffsync.enum import DiffSyncFlags from django.conf import settings from nautobot.apps.jobs import BooleanVar, IntegerVar, Job, MultiObjectVar, ObjectVar, StringVar from nautobot.core.celery import register_jobs @@ -11,6 +10,13 @@ from nautobot.extras.choices import SecretsGroupAccessTypeChoices, SecretsGroupSecretTypeChoices from nautobot.extras.models import Role, SecretsGroup, SecretsGroupAssociation, Status, Tag from nautobot.ipam.models import Namespace +from nautobot_plugin_nornir.constants import NORNIR_SETTINGS +from nautobot_plugin_nornir.plugins.inventory.nautobot_orm import NautobotORMInventory +from nautobot_ssot.jobs.base import DataSource +from nornir import InitNornir +from nornir.core.plugins.inventory import InventoryPluginRegister + +from diffsync.enum import DiffSyncFlags from nautobot_device_onboarding.diffsync.adapters.network_importer_adapters import ( NetworkImporterNautobotAdapter, NetworkImporterNetworkAdapter, @@ -25,21 +31,9 @@ from nautobot_device_onboarding.nornir_plays.command_getter import netmiko_send_commands from nautobot_device_onboarding.nornir_plays.empty_inventory import EmptyInventory from nautobot_device_onboarding.nornir_plays.logger import NornirLogger -from nautobot_device_onboarding.nornir_plays.processor import ProcessorDONew - -# from nautobot_device_onboarding.utils.formatter import ( -# normalize_interface_name, -# normalize_interface_type, -# normalize_tagged_interface, -# ) +from nautobot_device_onboarding.nornir_plays.processor import ProcessorDO from nautobot_device_onboarding.utils.helper import get_job_filter from nautobot_device_onboarding.utils.inventory_creator import _set_inventory -from nautobot_plugin_nornir.constants import NORNIR_SETTINGS -from nautobot_plugin_nornir.plugins.inventory.nautobot_orm import NautobotORMInventory -from nautobot_ssot.jobs.base import DataSource -from nornir import InitNornir -from nornir.core.plugins.inventory import InventoryPluginRegister -from nornir_netmiko 
import netmiko_send_command InventoryPluginRegister.register("nautobot-inventory", NautobotORMInventory) InventoryPluginRegister.register("empty-inventory", EmptyInventory) @@ -47,7 +41,6 @@ PLUGIN_SETTINGS = settings.PLUGINS_CONFIG["nautobot_device_onboarding"] LOGGER = logging.getLogger(__name__) -# COMMANDS = [] name = "Device Onboarding/Network Importer" # pylint: disable=invalid-name @@ -520,7 +513,7 @@ def run(self, *args, **kwargs): "plugin": "empty-inventory", }, ) as nornir_obj: - nr_with_processors = nornir_obj.with_processors([ProcessorDONew(logger, compiled_results)]) + nr_with_processors = nornir_obj.with_processors([ProcessorDO(logger, compiled_results)]) for entered_ip in self.ip_addresses: single_host_inventory_constructed = _set_inventory( entered_ip, self.platform, self.port, self.secrets_group @@ -577,6 +570,8 @@ class Meta: def run(self, *args, **kwargs): """Process onboarding task from ssot-ni job.""" try: + logger = NornirLogger(self.job_result, log_level=0) + compiled_results = {} qs = get_job_filter(kwargs) with InitNornir( runner=NORNIR_SETTINGS.get("runner"), @@ -587,100 +582,16 @@ def run(self, *args, **kwargs): "credentials_class": NORNIR_SETTINGS.get("credentials"), "queryset": qs, }, + # need to figure out how to inject the platform_yaml_data here into data }, ) as nornir_obj: - commands = ["show version", "show interfaces", "show vlan", "show interfaces switchport"] - all_results = {} - - for command in commands: - command_result = nornir_obj.run(task=netmiko_send_command, command_string=command, use_textfsm=True) - # all_results = format_ni_data_cisco_ios(command=command,command_result=command_result) - for host_name, result in command_result.items(): - if command_result.failed: - failed_results = {host_name: {"Failed": True, "subtask_result": result.result}} - return failed_results - if host_name not in all_results: - all_results[host_name] = {"interfaces": {}, "serial": ""} - - if command == "show version": - 
self.logger.info(f"Show version: {result.result}") - serial_info = result.result[0] - self.logger.info(f"Serial Info: {serial_info}") - serial_number = serial_info.get("serial") - all_results[host_name]["serial"] = serial_number[0] - elif command == "show interfaces": - self.logger.info(f"Interfaces: {result.result}") - for interface_info in result.result: - self.logger.info(f"Interface Info: {interface_info}") - interface_name = interface_info.get("interface") - # media_type = interface_info.get("media_type") - hardware_type = interface_info.get("hardware_type") - mtu = interface_info.get("mtu") - description = interface_info.get("description") - mac_address = interface_info.get("mac_address") - link_status = interface_info.get("link_status") - ip_address = interface_info.get("ip_address") - mask_length = interface_info.get("prefix_length") - - link_status = bool(link_status == "up") - - interface_type = normalize_interface_type(hardware_type) - - all_results[host_name]["interfaces"][interface_name] = { - "mtu": mtu, - "type": interface_type, - "description": description, - "mac_address": mac_address, - "enabled": link_status, - "ip_addresses": [{"host": ip_address, "mask_length": mask_length}], - } - elif command == "show vlan": - vlan_id_name_map = {} - self.logger.info(f"Vlan: {result.result}") - for vlan_info in result.result: - self.logger.info(f"Vlan info: {vlan_info}") - vlan_id = vlan_info.get("vlan_id") - vlan_name = vlan_info.get("vlan_name") - vlan_id_name_map[vlan_id] = vlan_name - self.logger.info(f"Vlan ID Name Map: {vlan_id_name_map}") - - elif command == "show interfaces switchport": - self.logger.info(f"Interfaces Switchport: {result.result}") - for interface_info in result.result: - self.logger.info(f"Interface Info: {interface_info}") - interface_name = normalize_interface_name(interface_info.get("interface")) - self.logger.info(f"Interface Name: {interface_name}") - interface_mode = normalize_tagged_interface(interface_info.get("admin_mode")) - 
access_vlan = interface_info.get("access_vlan") - tagged_vlans = interface_info.get("trunking_vlans", []) - tagged_vlans_list = tagged_vlans[0].split(",") - self.logger.info(f"tagged_vlans: {tagged_vlans}") - - if interface_name in all_results[host_name]["interfaces"]: - all_results[host_name]["interfaces"][interface_name]["mode"] = interface_mode - all_results[host_name]["interfaces"][interface_name]["access_vlan"] = { - "vlan_id": access_vlan, - "vlan_name": vlan_id_name_map.get(access_vlan, ""), - } - - # Prepare tagged VLANs info - tagged_vlans_info = [ - {"vlan_id": vlan_id, "vlan_name": vlan_id_name_map.get(vlan_id, "Unknown VLAN")} - for vlan_id in tagged_vlans_list - if vlan_id in vlan_id_name_map - ] - self.logger.info(f"tagged_vlans_info: {tagged_vlans_info}") - all_results[host_name]["interfaces"][interface_name][ - "tagged_vlans" - ] = tagged_vlans_info - else: - self.logger.info(f"Interface {interface_name} not found in interfaces list.") - + # commands = ["show version", "show interfaces", "show vlan", "show interfaces switchport"] + nr_with_processors = nornir_obj.with_processors([ProcessorDO(logger, compiled_results)]) + nr_with_processors.run(task=netmiko_send_commands) except Exception as err: # pylint: disable=broad-exception-caught self.logger.info("Error: %s", err) return err - - return all_results + return compiled_results jobs = [OnboardingTask, SSOTDeviceOnboarding, SSOTNetworkImporter, CommandGetterDO, CommandGetterNetworkImporter] diff --git a/nautobot_device_onboarding/nornir_plays/command_getter.py b/nautobot_device_onboarding/nornir_plays/command_getter.py index 9a999ebb..69262573 100755 --- a/nautobot_device_onboarding/nornir_plays/command_getter.py +++ b/nautobot_device_onboarding/nornir_plays/command_getter.py @@ -7,8 +7,9 @@ def _get_commands_to_run(yaml_parsed_info): """Load yaml file and look up all commands that need to be run.""" commands = [] - for _, value in yaml_parsed_info['device_onboarding'].items(): - 
commands.append(value['command']) + for key, value in yaml_parsed_info["device_onboarding"].items(): + if not key == "use_textfsm": + commands.append(value["command"]) return list(set(commands)) @@ -16,4 +17,5 @@ def netmiko_send_commands(task: Task): """Run commands specified in PLATFORM_COMMAND_MAP.""" commands = _get_commands_to_run(task.host.data["platform_parsing_info"]) for command in commands: - task.run(task=netmiko_send_command, name=command, command_string=command, use_textfsm=True) + command_use_textfsm = task.host.data["platform_parsing_info"]["device_onboarding"]["use_textfsm"] + task.run(task=netmiko_send_command, name=command, command_string=command, use_textfsm=command_use_textfsm) diff --git a/nautobot_device_onboarding/nornir_plays/processor.py b/nautobot_device_onboarding/nornir_plays/processor.py index 5054512b..5c0fd537 100755 --- a/nautobot_device_onboarding/nornir_plays/processor.py +++ b/nautobot_device_onboarding/nornir_plays/processor.py @@ -2,108 +2,15 @@ from typing import Dict -from nautobot_device_onboarding.utils.formatter import ( # format_ob_data_ios,; format_ob_data_junos,; format_ob_data_nxos, - extract_show_data, -) from nornir.core.inventory import Host -from nornir.core.task import AggregatedResult, MultiResult, Task +from nornir.core.task import MultiResult, Task from nornir_nautobot.exceptions import NornirNautobotException from nornir_nautobot.plugins.processors import BaseLoggingProcessor -# class ProcessorDO(BaseLoggingProcessor): -# """Processor class for Device Onboarding jobs.""" +from nautobot_device_onboarding.utils.formatter import extract_show_data -# def __init__(self, logger, command_outputs): -# """Set logging facility.""" -# self.logger = logger -# self.data: Dict = command_outputs -# def task_started(self, task: Task) -> None: -# """Boilerplate Nornir processor for task_started.""" -# self.data[task.name] = {} -# # self.data[task.name]["started"] = True -# self.logger.info(f"Task Name: {task.name} started") - 
-# def task_completed(self, task: Task, result: AggregatedResult) -> None: -# """Boilerplate Nornir processor for task_instance_completed.""" -# # self.data[task.name]["completed"] = True -# self.logger.info(f"Task Name: {task.name} completed") - -# def task_instance_started(self, task: Task, host: Host) -> None: -# """Processor for Logging on Task Start.""" -# self.logger.info(f"Starting {task.name}.", extra={"object": task.host}) -# self.data[task.name][host.name] = {} - -# def task_instance_completed(self, task: Task, host: Host, result: MultiResult) -> None: -# """Nornir processor task completion for OS upgrades. - -# Args: -# task (Task): Nornir task individual object -# host (Host): Host object with Nornir -# result (MultiResult): Result from Nornir task - -# Returns: -# None -# """ -# # Complex logic to see if the task exception is expected, which is depicted by -# # a sub task raising a NornirNautobotException. -# if result.failed: -# for level_1_result in result: -# if hasattr(level_1_result, "exception") and hasattr(level_1_result.exception, "result"): -# for level_2_result in level_1_result.exception.result: # type: ignore -# if isinstance(level_2_result.exception, NornirNautobotException): -# return -# self.logger.critical(f"{task.name} failed: {result.exception}", extra={"object": task.host}) -# else: -# self.logger.info(f"Task Name: {task.name} Task Result: {result.result}", extra={"object": task.host}) - -# # self.data[task.name][host.name] = { -# # "completed": True, -# # "failed": result.failed, -# # } - -# def subtask_instance_completed(self, task: Task, host: Host, result: MultiResult) -> None: -# """Processor for Logging on SubTask Completed.""" -# self.logger.info(f"Subtask completed {task.name}.", extra={"object": task.host}) -# self.logger.info(f"Subtask result {result.result}.", extra={"object": task.host}) - -# self.data[task.name][host.name] = { -# "failed": result.failed, -# "subtask_result": result.result, -# } - -# if 
self.data[task.name][host.name].get("failed"): -# self.data[host.name] = { -# "failed": True, -# "subtask_result": result.result, -# } -# elif host.name not in self.data: -# self.data[host.name] = { -# "platform": host.platform, -# "manufacturer": host.platform.split("_")[0].title() if host.platform else "PLACEHOLDER", -# "network_driver": host.platform, -# } - -# if host.platform in ["cisco_ios", "cisco_xe"]: -# formatted_data = format_ob_data_ios(host, result) -# elif host.platform == "cisco_nxos": -# formatted_data = format_ob_data_nxos(host, result) -# elif host.platform == "juniper_junos": -# formatted_data = format_ob_data_junos(host, result) -# else: -# formatted_data = {} -# self.logger.info(f"No formatter for platform: {host.platform}.", extra={"object": task.host}) - -# self.data[host.name].update(formatted_data) - -# def subtask_instance_started(self, task: Task, host: Host) -> None: -# """Processor for Logging on SubTask Start.""" -# self.logger.info(f"Subtask starting {task.name}.", extra={"object": task.host}) -# self.data[task.name] = {} -# # self.data[task.name][host.name] = {"started": True} - - -class ProcessorDONew(BaseLoggingProcessor): +class ProcessorDO(BaseLoggingProcessor): """Processor class for Device Onboarding jobs.""" def __init__(self, logger, command_outputs): @@ -132,18 +39,23 @@ def task_instance_completed(self, task: Task, host: Host, result: MultiResult) - return self.logger.critical(f"{task.name} failed: {result.exception}", extra={"object": task.host}) else: - self.logger.info(f"task_instance_completed Task Name: {task.name} Task Result: {result.result}", extra={"object": task.host}) + self.logger.info( + f"task_instance_completed Task Name: {task.name} Task Result: {result.result}", + extra={"object": task.host}, + ) def subtask_instance_completed(self, task: Task, host: Host, result: MultiResult) -> None: """Processor for Logging on SubTask Completed.""" self.logger.info(f"subtask_instance_completed Subtask completed 
{task.name}.", extra={"object": task.host}) self.logger.info(f"subtask_instance_completed Subtask result {result.result}.", extra={"object": task.host}) - self.data[host.name].update({ - "failed": result.failed, - }) + self.data[host.name].update( + { + "failed": result.failed, + } + ) formatted_data = extract_show_data(host, result) - print(f"current formated data = {formatted_data}") + # revist should be able to just update self.data with full formatted_data for k, v in formatted_data.items(): self.data[host.name][k] = v @@ -152,7 +64,7 @@ def subtask_instance_started(self, task: Task, host: Host) -> None: # show comm self.logger.info(f"subtask_instance_started Subtask starting {task.name}.", extra={"object": task.host}) if not self.data.get(host.name): self.data[host.name] = { - "platform": host.platform, - "manufacturer": host.platform.split("_")[0].title() if host.platform else "PLACEHOLDER", - "network_driver": host.platform, - } + "platform": host.platform, + "manufacturer": host.platform.split("_")[0].title() if host.platform else "PLACEHOLDER", + "network_driver": host.platform, + } diff --git a/nautobot_device_onboarding/utils/diffsync_utils.py b/nautobot_device_onboarding/utils/diffsync_utils.py index afb11aca..a9193216 100644 --- a/nautobot_device_onboarding/utils/diffsync_utils.py +++ b/nautobot_device_onboarding/utils/diffsync_utils.py @@ -4,7 +4,6 @@ from django.core.exceptions import ObjectDoesNotExist, ValidationError from nautobot.apps.choices import PrefixTypeChoices -from nautobot.extras.models import Status from nautobot.ipam.models import IPAddress, Prefix @@ -35,7 +34,7 @@ def get_or_create_prefix(host, mask_length, default_status, namespace, job=None) def get_or_create_ip_address(host, mask_length, namespace, default_ip_status, default_prefix_status, job=None): """Attempt to get a Nautobot IPAddress, create a new one if necessary.""" ip_address = None - default_status = Status.objects.get(name="Active") + try: ip_address = 
IPAddress.objects.get( host=host, diff --git a/nautobot_device_onboarding/utils/formatter.py b/nautobot_device_onboarding/utils/formatter.py index 8e00699c..37dc944d 100755 --- a/nautobot_device_onboarding/utils/formatter.py +++ b/nautobot_device_onboarding/utils/formatter.py @@ -1,4 +1,5 @@ """Formatter.""" + import os import yaml @@ -32,13 +33,11 @@ def load_yaml_datafile(filename, config=None): def extract_show_data(host, multi_result): - """_summary_ + """Take a result of show command and extra specific needed data. Args: - host (_type_): _description_ - result (_type_): _description_ - - result is a MultiResult Nornir Object for a single host. + host (host): host from task + multi_result (multiResult): multiresult object from nornir """ host_platform = host.platform if host_platform == "cisco_xe": @@ -46,188 +45,13 @@ def extract_show_data(host, multi_result): command_jpaths = host.data["platform_parsing_info"] result_dict = {} - for default_dict_field, command_info in command_jpaths['device_onboarding'].items(): - if command_info["command"] == multi_result[0].name: - extracted_value = extract_data_from_json(multi_result[0].result, command_info['jpath']) - if isinstance(extracted_value, list) and len(extracted_value) == 1: - extracted_value = extracted_value[0] - result_dict[default_dict_field] = extracted_value + for default_dict_field, command_info in command_jpaths["device_onboarding"].items(): + if not default_dict_field == "use_textfsm": + if command_info["command"] == multi_result[0].name: + extracted_value = extract_data_from_json(multi_result[0].result, command_info["jpath"]) + if isinstance(extracted_value, list) and len(extracted_value) == 1: + extracted_value = extracted_value[0] + if "/" in extracted_value and default_dict_field == "mask_length": + extracted_value = extracted_value.split("/")[1] + result_dict[default_dict_field] = extracted_value return result_dict - -# from nautobot_device_onboarding.constants import ( -# 
CISCO_INTERFACE_ABBREVIATIONS, -# CISCO_TO_NAUTOBOT_INTERFACE_TYPE, -# TAGGED_INTERFACE_TYPES, -# ) - - -# def format_ob_data_ios(host, result): -# """Format the data for onboarding IOS devices.""" -# primary_ip4 = host.name -# formatted_data = {} - -# for r in result: -# if r.name == "show inventory": -# device_type = r.result[0].get("pid") -# formatted_data["device_type"] = device_type -# elif r.name == "show version": -# hostname = r.result[0].get("hostname") -# serial = r.result[0].get("serial") -# formatted_data["hostname"] = hostname -# formatted_data["serial"] = serial[0] -# elif r.name == "show interfaces": -# show_interfaces = r.result -# for interface in show_interfaces: -# if interface.get("ip_address") == primary_ip4: -# mask_length = interface.get("prefix_length") -# interface_name = interface.get("interface") -# formatted_data["mask_length"] = mask_length -# formatted_data["mgmt_interface"] = interface_name - -# return formatted_data - - -# def format_ob_data_nxos(host, result): -# """Format the data for onboarding NXOS devices.""" -# primary_ip4 = host.name -# formatted_data = {} - -# for r in result: -# if r.name == "show inventory": -# # TODO: Add check for PID when textfsm template is fixed -# pass -# elif r.name == "show version": -# device_type = r.result[0].get("platform") -# formatted_data["device_type"] = device_type -# hostname = r.result[0].get("hostname") -# serial = r.result[0].get("serial") -# formatted_data["hostname"] = hostname -# if serial: -# formatted_data["serial"] = serial -# else: -# formatted_data["serial"] = "" -# elif r.name == "show interface": -# show_interfaces = r.result -# print(f"show interfaces {show_interfaces}") -# for interface in show_interfaces: -# if interface.get("ip_address") == primary_ip4: -# mask_length = interface.get("prefix_length") -# interface_name = interface.get("interface") -# formatted_data["mask_length"] = mask_length -# formatted_data["mgmt_interface"] = interface_name -# break -# return 
formatted_data - -# return formatted_data - -# def format_ob_data_junos(host, result): -# """Format the data for onboarding Juniper JUNOS devices.""" -# primary_ip4 = host.name -# formatted_data = {} - -# for r in result: -# if r.name == "show version": -# device_type = r.result[0].get("model") -# formatted_data["device_type"] = device_type -# hostname = r.result[0].get("hostname") -# serial = "USASR24490" -# # serial = r.result[0].get("serial") -# formatted_data["hostname"] = hostname -# if serial: -# formatted_data["serial"] = serial -# else: -# formatted_data["serial"] = "" -# elif r.name == "show interfaces": -# show_interfaces = r.result -# print(f"show interfaces {show_interfaces}") -# for interface in show_interfaces: -# if interface.get("local") == primary_ip4: -# print(interface.get("destination")) -# mask_length = interface.get("destination").split("/")[1] -# print(f"interface mask {mask_length}") -# interface_name = interface.get("interface") -# formatted_data["mask_length"] = mask_length -# formatted_data["mgmt_interface"] = interface_name -# break - -# return formatted_data - - -# def normalize_interface_name(interface_name): -# """Normalize interface names.""" -# for interface_abbreviation, interface_full in CISCO_INTERFACE_ABBREVIATIONS.items(): -# if interface_name.startswith(interface_abbreviation): -# interface_name = interface_name.replace(interface_abbreviation, interface_full, 1) -# break -# return interface_name - - -# def normalize_interface_type(interface_type): -# """Normalize interface types.""" -# if interface_type in CISCO_TO_NAUTOBOT_INTERFACE_TYPE: -# return CISCO_TO_NAUTOBOT_INTERFACE_TYPE[interface_type] -# return "other" - - -# def normalize_tagged_interface(tagged_interface): -# """Normalize tagged interface types.""" -# if tagged_interface in TAGGED_INTERFACE_TYPES: -# return TAGGED_INTERFACE_TYPES[tagged_interface] -# return "" - - -# def format_ni_data_cisco_ios(command, command_result): -# """Format cisco_ios data.""" -# 
all_results = {} -# # command = ["show version", "show interfaces", "show vlan", "show interfaces switchport"] -# for host_name, result in command_result.items(): -# if host_name not in all_results: -# all_results[host_name] = {"interfaces": {}, "serial": ""} - -# if command == "show version": -# serial_info = result.result[0] -# serial_number = serial_info.get("serial") -# all_results[host_name]["serial"] = serial_number[0] -# elif command == "show interfaces": -# print(f"Interfaces: {result.result}") -# for interface_info in result.result: -# interface_name = interface_info.get("interface") -# media_type = interface_info.get("media_type") -# hardware_type = interface_info.get("hardware_type") -# mtu = interface_info.get("mtu") -# description = interface_info.get("description") -# mac_address = interface_info.get("mac_address") -# link_status = interface_info.get("link_status") - -# if link_status == "up": -# link_status = True -# else: -# link_status = False - -# type = "other" -# if hardware_type == "EtherChannel": -# type = "lag" -# elif hardware_type == "Ethernet SVI": -# type = "virtual" -# elif media_type == "10/100/1000BaseTX": -# type = "100base-tx" -# else: -# type = "other" - -# all_results[host_name]["interfaces"][interface_name] = { -# "mtu": mtu, -# "type": type, -# "media_type": media_type, -# "hardware_type": hardware_type, -# "description": description, -# "mac_address": mac_address, -# "enabled": link_status, -# } -# elif command == "show vlan": -# print(f"Vlan: {result.result}") -# elif command == "show interfaces switchport": -# for interface_info in result.result: -# print(f"Interfaces switchport: {result.result}") -# interface_mode = interface_info.get("admin_mode") -# access_vlan = interface_info.get("access_vlan") -# return all_results diff --git a/nautobot_device_onboarding/utils/inventory_creator.py b/nautobot_device_onboarding/utils/inventory_creator.py index 2f2b080e..c0ab149b 100755 --- 
a/nautobot_device_onboarding/utils/inventory_creator.py +++ b/nautobot_device_onboarding/utils/inventory_creator.py @@ -2,11 +2,12 @@ from django.conf import settings from nautobot.extras.choices import SecretsGroupAccessTypeChoices, SecretsGroupSecretTypeChoices -from nautobot_device_onboarding.exceptions import OnboardException -from nautobot_device_onboarding.utils.formatter import load_yaml_datafile from netmiko import SSHDetect from nornir.core.inventory import ConnectionOptions, Host +from nautobot_device_onboarding.exceptions import OnboardException +from nautobot_device_onboarding.utils.formatter import load_yaml_datafile + def _get_platform_parsing_info(host_platform, data): """Open and load yaml file.""" @@ -95,7 +96,7 @@ def _set_inventory(host_ip, platform, port, secrets_group): platform=platform, ) }, - data={"platform_parsing_info": parsing_info} + data={"platform_parsing_info": parsing_info}, ) inv.update({host_ip: host}) diff --git a/poetry.lock b/poetry.lock index 0dfe16ad..77d263fd 100755 --- a/poetry.lock +++ b/poetry.lock @@ -124,6 +124,7 @@ tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "p name = "autopep8" version = "2.0.0" description = "A tool that automatically formats Python code to conform to the PEP 8 style guide" +category = "main" optional = false python-versions = "*" files = [ @@ -1063,6 +1064,7 @@ hiredis = ["redis[hiredis] (>=3,!=4.0.0,!=4.0.1)"] name = "django-silk" version = "5.1.0" description = "Silky smooth profiling for the Django Framework" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1347,6 +1349,7 @@ test = ["black", "coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mock", "mypy", "pre name = "gprof2dot" version = "2022.7.29" description = "Generate a dot graph from the output of several profilers." 
+category = "main" optional = false python-versions = ">=2.7" files = [ @@ -2896,7 +2899,7 @@ files = [ name = "pycodestyle" version = "2.9.1" description = "Python style guide checker" -category = "dev" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -4080,7 +4083,7 @@ files = [ name = "tomli" version = "2.0.1" description = "A lil' TOML parser" -category = "dev" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4405,4 +4408,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = ">=3.8,<3.12" -content-hash = "840fb06383c6c52bb58febbaf5a3d2487b8f46c0f82bb966cb62b6f8b83d3663" +content-hash = "ff0b9f5780232b35c98dca2dcfb617b37c2c4f9faca60df0d21ef7400144a830" diff --git a/pyproject.toml b/pyproject.toml index 94ca30ad..b27c6f45 100755 --- a/pyproject.toml +++ b/pyproject.toml @@ -33,6 +33,7 @@ nautobot = "^2.1.1" nautobot-ssot = "^2.2.0" nautobot-plugin-nornir = "2.0.0" jdiff = "^0.0.6" +ntc-templates = "^4.3.0" [tool.poetry.group.dev.dependencies] From c5540d9fb6d5899e19aeb1e652cf1c9a9d2fa12d Mon Sep 17 00:00:00 2001 From: Jeff Kala Date: Fri, 16 Feb 2024 12:48:36 -0600 Subject: [PATCH 071/225] fix unused import --- nautobot_device_onboarding/jobs.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py index 3e716908..c01be452 100755 --- a/nautobot_device_onboarding/jobs.py +++ b/nautobot_device_onboarding/jobs.py @@ -34,10 +34,6 @@ from nornir import InitNornir from nornir.core.plugins.inventory import InventoryPluginRegister -# from nornir.core.task import Result, Task -# from nornir_nautobot.exceptions import NornirNautobotException -from nornir_netmiko import netmiko_send_command - InventoryPluginRegister.register("nautobot-inventory", NautobotORMInventory) InventoryPluginRegister.register("empty-inventory", EmptyInventory) From 
156f8836f5c19a51e481a10153d19fe709c52538 Mon Sep 17 00:00:00 2001 From: Jeff Kala Date: Fri, 16 Feb 2024 15:24:22 -0600 Subject: [PATCH 072/225] get rest working --- .../command_mappers/cisco_ios.yml | 6 +++++- nautobot_device_onboarding/jobs.py | 13 +++++++------ .../nornir_plays/command_getter.py | 11 ++++++----- .../nornir_plays/empty_inventory.py | 2 +- .../nornir_plays/processor.py | 2 +- nautobot_device_onboarding/utils/formatter.py | 4 ++-- nautobot_device_onboarding/utils/helper.py | 15 +++++++++++++++ .../utils/inventory_creator.py | 14 ++------------ 8 files changed, 39 insertions(+), 28 deletions(-) diff --git a/nautobot_device_onboarding/command_mappers/cisco_ios.yml b/nautobot_device_onboarding/command_mappers/cisco_ios.yml index 013ca178..5ec58cd8 100755 --- a/nautobot_device_onboarding/command_mappers/cisco_ios.yml +++ b/nautobot_device_onboarding/command_mappers/cisco_ios.yml @@ -16,7 +16,11 @@ device_onboarding: mask_length: command: "show interfaces" jpath: "[?ip_address=='{{ host_info }}'].prefix_length" - +network_importer: + use_textfsm: true + hostname: + jpath: "[*].hostname" + command: "show version" # commands = ["show version", "show interfaces", "show vlan", "show interfaces switchport"] # all_results = {} diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py index c01be452..b538c585 100755 --- a/nautobot_device_onboarding/jobs.py +++ b/nautobot_device_onboarding/jobs.py @@ -26,16 +26,18 @@ from nautobot_device_onboarding.nornir_plays.empty_inventory import EmptyInventory from nautobot_device_onboarding.nornir_plays.logger import NornirLogger from nautobot_device_onboarding.nornir_plays.processor import ProcessorDO -from nautobot_device_onboarding.utils.helper import get_job_filter +from nautobot_device_onboarding.utils.helper import add_platform_parsing_info, get_job_filter from nautobot_device_onboarding.utils.inventory_creator import _set_inventory from nautobot_plugin_nornir.constants import 
NORNIR_SETTINGS from nautobot_plugin_nornir.plugins.inventory.nautobot_orm import NautobotORMInventory from nautobot_ssot.jobs.base import DataSource from nornir import InitNornir -from nornir.core.plugins.inventory import InventoryPluginRegister +from nornir.core.plugins.inventory import InventoryPluginRegister, TransformFunctionRegister InventoryPluginRegister.register("nautobot-inventory", NautobotORMInventory) InventoryPluginRegister.register("empty-inventory", EmptyInventory) +TransformFunctionRegister.register("transform_to_add_command_parser_info", add_platform_parsing_info) + PLUGIN_SETTINGS = settings.PLUGINS_CONFIG["nautobot_device_onboarding"] @@ -518,7 +520,7 @@ def run(self, *args, **kwargs): entered_ip, self.platform, self.port, self.secrets_group ) nr_with_processors.inventory.hosts.update(single_host_inventory_constructed) - nr_with_processors.run(task=netmiko_send_commands) + nr_with_processors.run(task=netmiko_send_commands, command_getter_job="device_onboarding") except Exception as err: # pylint: disable=broad-exception-caught self.logger.error("Error: %s", err) return err @@ -581,12 +583,11 @@ def run(self, *args, **kwargs): "credentials_class": NORNIR_SETTINGS.get("credentials"), "queryset": qs, }, - # need to figure out how to inject the platform_yaml_data here into data + "transform_function": "transform_to_add_command_parser_info", }, ) as nornir_obj: - # commands = ["show version", "show interfaces", "show vlan", "show interfaces switchport"] nr_with_processors = nornir_obj.with_processors([ProcessorDO(logger, compiled_results)]) - nr_with_processors.run(task=netmiko_send_commands) + nr_with_processors.run(task=netmiko_send_commands, command_getter_job="network_importer") except Exception as err: # pylint: disable=broad-exception-caught self.logger.info("Error: %s", err) return err diff --git a/nautobot_device_onboarding/nornir_plays/command_getter.py b/nautobot_device_onboarding/nornir_plays/command_getter.py index 69262573..caea61a8 
100755 --- a/nautobot_device_onboarding/nornir_plays/command_getter.py +++ b/nautobot_device_onboarding/nornir_plays/command_getter.py @@ -4,18 +4,19 @@ from nornir_netmiko.tasks import netmiko_send_command -def _get_commands_to_run(yaml_parsed_info): +def _get_commands_to_run(yaml_parsed_info, command_getter_job): """Load yaml file and look up all commands that need to be run.""" commands = [] - for key, value in yaml_parsed_info["device_onboarding"].items(): + for key, value in yaml_parsed_info[command_getter_job].items(): if not key == "use_textfsm": commands.append(value["command"]) + print(f"COMMANDS: {commands}") return list(set(commands)) -def netmiko_send_commands(task: Task): +def netmiko_send_commands(task: Task, command_getter_job: str): """Run commands specified in PLATFORM_COMMAND_MAP.""" - commands = _get_commands_to_run(task.host.data["platform_parsing_info"]) + commands = _get_commands_to_run(task.host.data["platform_parsing_info"], command_getter_job) for command in commands: - command_use_textfsm = task.host.data["platform_parsing_info"]["device_onboarding"]["use_textfsm"] + command_use_textfsm = task.host.data["platform_parsing_info"][command_getter_job]["use_textfsm"] task.run(task=netmiko_send_command, name=command, command_string=command, use_textfsm=command_use_textfsm) diff --git a/nautobot_device_onboarding/nornir_plays/empty_inventory.py b/nautobot_device_onboarding/nornir_plays/empty_inventory.py index 460717d2..d9531d72 100755 --- a/nautobot_device_onboarding/nornir_plays/empty_inventory.py +++ b/nautobot_device_onboarding/nornir_plays/empty_inventory.py @@ -4,7 +4,7 @@ class EmptyInventory: - """Creates an empty Nornir Inventory to be populated later.""" + """Creates an empty Nornir inventory.""" def load(self) -> Inventory: """Create a default empty inventory.""" diff --git a/nautobot_device_onboarding/nornir_plays/processor.py b/nautobot_device_onboarding/nornir_plays/processor.py index 7d9dc13e..45575e32 100755 --- 
a/nautobot_device_onboarding/nornir_plays/processor.py +++ b/nautobot_device_onboarding/nornir_plays/processor.py @@ -53,7 +53,7 @@ def subtask_instance_completed(self, task: Task, host: Host, result: MultiResult "failed": result.failed, } ) - formatted_data = extract_show_data(host, result) + formatted_data = extract_show_data(host, result, task.parent_task.params['command_getter_job']) # revist should be able to just update self.data with full formatted_data for k, v in formatted_data.items(): self.data[host.name][k] = v diff --git a/nautobot_device_onboarding/utils/formatter.py b/nautobot_device_onboarding/utils/formatter.py index 37dc944d..abc207db 100755 --- a/nautobot_device_onboarding/utils/formatter.py +++ b/nautobot_device_onboarding/utils/formatter.py @@ -32,7 +32,7 @@ def load_yaml_datafile(filename, config=None): return yaml.safe_load(populated) -def extract_show_data(host, multi_result): +def extract_show_data(host, multi_result, command_getter_type): """Take a result of show command and extra specific needed data. 
Args: @@ -45,7 +45,7 @@ def extract_show_data(host, multi_result): command_jpaths = host.data["platform_parsing_info"] result_dict = {} - for default_dict_field, command_info in command_jpaths["device_onboarding"].items(): + for default_dict_field, command_info in command_jpaths[command_getter_type].items(): if not default_dict_field == "use_textfsm": if command_info["command"] == multi_result[0].name: extracted_value = extract_data_from_json(multi_result[0].result, command_info["jpath"]) diff --git a/nautobot_device_onboarding/utils/helper.py b/nautobot_device_onboarding/utils/helper.py index d5a86150..df5a7a92 100644 --- a/nautobot_device_onboarding/utils/helper.py +++ b/nautobot_device_onboarding/utils/helper.py @@ -2,6 +2,7 @@ from nautobot.dcim.filters import DeviceFilterSet from nautobot.dcim.models import Device +from nautobot_device_onboarding.utils.formatter import load_yaml_datafile from nornir_nautobot.exceptions import NornirNautobotException FIELDS_PK = { @@ -47,3 +48,17 @@ def get_job_filter(data=None): ) return devices_filtered.qs + + +def _get_platform_parsing_info(host_platform, data): + """Open and load yaml file.""" + if host_platform == "cisco_xe": + host_platform = "cisco_ios" + yaml_parsing_info = load_yaml_datafile(f"{host_platform}.yml", config=data) + return yaml_parsing_info + + +def add_platform_parsing_info(host): + """This nornir transform function adds platform parsing info.""" + parsing_info = _get_platform_parsing_info(host.platform, data={"host_info": host}) + host.data.update({"platform_parsing_info": parsing_info}) diff --git a/nautobot_device_onboarding/utils/inventory_creator.py b/nautobot_device_onboarding/utils/inventory_creator.py index c0ab149b..b7eff582 100755 --- a/nautobot_device_onboarding/utils/inventory_creator.py +++ b/nautobot_device_onboarding/utils/inventory_creator.py @@ -2,20 +2,11 @@ from django.conf import settings from nautobot.extras.choices import SecretsGroupAccessTypeChoices, SecretsGroupSecretTypeChoices 
+from nautobot_device_onboarding.exceptions import OnboardException +from nautobot_device_onboarding.utils.helper import _get_platform_parsing_info from netmiko import SSHDetect from nornir.core.inventory import ConnectionOptions, Host -from nautobot_device_onboarding.exceptions import OnboardException -from nautobot_device_onboarding.utils.formatter import load_yaml_datafile - - -def _get_platform_parsing_info(host_platform, data): - """Open and load yaml file.""" - if host_platform == "cisco_xe": - host_platform = "cisco_ios" - yaml_parsing_info = load_yaml_datafile(f"{host_platform}.yml", config=data) - return yaml_parsing_info - def _parse_credentials(credentials): """Parse and return dictionary of credentials.""" @@ -77,7 +68,6 @@ def _set_inventory(host_ip, platform, port, secrets_group): platform = platform.network_driver else: platform = guess_netmiko_device_type(host_ip, username, password, port) - parsing_info = _get_platform_parsing_info(platform, data={"host_info": host_ip}) host = Host( From 83cd54c5c2f2a5a3f6f08c67ae0058688609f649 Mon Sep 17 00:00:00 2001 From: David Cates Date: Fri, 16 Feb 2024 17:15:29 -0700 Subject: [PATCH 073/225] add data type check for device on boarding network adapter --- .../diffsync/adapters/onboarding_adapters.py | 149 +++++++------ .../diffsync/mock_data.py | 4 +- poetry.lock | 208 +----------------- 3 files changed, 95 insertions(+), 266 deletions(-) diff --git a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py index ea36a328..2618c1f5 100644 --- a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py @@ -183,89 +183,114 @@ def execute_command_getter(self): self.job.logger.debug(f"Command Getter Job Result: {result.result}") self._handle_failed_connections(device_data=result.result) + def _check_data_type(self, data): + """Verify the data returned 
from CommandGetter is not a string.""" + data_type_check_result = True + if type(data) == str: + data_type_check_result = False + return data_type_check_result + + def load_manufacturers(self): """Load manufacturers into the DiffSync store.""" for ip_address in self.device_data: - if self.job.debug: - self.job.logger.debug(f"loading manufacturer data for {ip_address}") - onboarding_manufacturer = self.manufacturer( - diffsync=self, - name=self.device_data[ip_address]["manufacturer"], - ) # type: ignore try: - self.add(onboarding_manufacturer) - except diffsync.ObjectAlreadyExists: - pass + if self.job.debug: + self.job.logger.debug(f"loading manufacturer data for {ip_address}") + onboarding_manufacturer = self.manufacturer( + diffsync=self, + name=self.device_data[ip_address]["manufacturer"], + ) # type: ignore + try: + self.add(onboarding_manufacturer) + except diffsync.ObjectAlreadyExists: + pass + except KeyError as err: + self.job.logger.error(f"{ip_address}: Manufacturer due to missing key in returned data, {err}") def load_platforms(self): """Load platforms into the DiffSync store.""" for ip_address in self.device_data: - if self.job.debug: - self.job.logger.debug(f"loading platform data for {ip_address}") - onboarding_platform = self.platform( - diffsync=self, - name=self.device_data[ip_address]["platform"], - manufacturer__name=self.device_data[ip_address]["manufacturer"], - network_driver=self.device_data[ip_address]["network_driver"], - ) # type: ignore try: - self.add(onboarding_platform) - except diffsync.ObjectAlreadyExists: - pass + if self.job.debug: + self.job.logger.debug(f"loading platform data for {ip_address}") + onboarding_platform = self.platform( + diffsync=self, + name=self.device_data[ip_address]["platform"], + manufacturer__name=self.device_data[ip_address]["manufacturer"], + network_driver=self.device_data[ip_address]["network_driver"], + ) # type: ignore + try: + self.add(onboarding_platform) + except diffsync.ObjectAlreadyExists: + pass 
+ except KeyError as err: + self.job.logger.error(f"{ip_address}: Platform due to missing key in returned data, {err}") def load_device_types(self): """Load device types into the DiffSync store.""" for ip_address in self.device_data: - if self.job.debug: - self.job.logger.debug(f"loading device_type data for {ip_address}") - onboarding_device_type = self.device_type( - diffsync=self, - model=self.device_data[ip_address]["device_type"], - part_number=self.device_data[ip_address]["device_type"], - manufacturer__name=self.device_data[ip_address]["manufacturer"], - ) # type: ignore try: - self.add(onboarding_device_type) - except diffsync.ObjectAlreadyExists: - pass + if self.job.debug: + self.job.logger.debug(f"loading device_type data for {ip_address}") + onboarding_device_type = self.device_type( + diffsync=self, + model=self.device_data[ip_address]["device_type"], + part_number=self.device_data[ip_address]["device_type"], + manufacturer__name=self.device_data[ip_address]["manufacturer"], + ) # type: ignore + try: + self.add(onboarding_device_type) + except diffsync.ObjectAlreadyExists: + pass + except KeyError as err: + self.job.logger.error(f"{ip_address}: DeviceType due to missing key in returned data, {err}") def load_devices(self): """Load devices into the DiffSync store.""" for ip_address in self.device_data: - if self.job.debug: - self.job.logger.debug(f"loading device data for {ip_address}") - onboarding_device = self.device( - diffsync=self, - device_type__model=self.device_data[ip_address]["device_type"], - location__name=self.job.location.name, - name=self.device_data[ip_address]["hostname"], - platform__name=self.device_data[ip_address]["platform"], - primary_ip4__host=ip_address, - primary_ip4__status__name=self.job.ip_address_status.name, - role__name=self.job.device_role.name, - status__name=self.job.device_status.name, - secrets_group__name=self.job.secrets_group.name, - interfaces=[self.device_data[ip_address]["mgmt_interface"]], - 
mask_length=self.device_data[ip_address]["mask_length"], - serial=self.device_data[ip_address]["serial"], - ) # type: ignore try: - self.add(onboarding_device) if self.job.debug: - self.job.logger.debug(f"Device: {self.device_data[ip_address]['hostname']} loaded.") - except diffsync.ObjectAlreadyExists: - self.job.logger.error( - f"Device: {self.device_data[ip_address]['hostname']} has already been loaded! " - f"Duplicate devices will not be synced. " - f"[Serial Number: {self.device_data[ip_address]['serial']}, " - f"IP Address: {ip_address}]" - ) + self.job.logger.debug(f"loading device data for {ip_address}") + onboarding_device = self.device( + diffsync=self, + device_type__model=self.device_data[ip_address]["device_type"], + location__name=self.job.location.name, + name=self.device_data[ip_address]["hostname"], + platform__name=self.device_data[ip_address]["platform"], + primary_ip4__host=ip_address, + primary_ip4__status__name=self.job.ip_address_status.name, + role__name=self.job.device_role.name, + status__name=self.job.device_status.name, + secrets_group__name=self.job.secrets_group.name, + interfaces=[self.device_data[ip_address]["mgmt_interface"]], + mask_length=self.device_data[ip_address]["mask_length"], + serial=self.device_data[ip_address]["serial"], + ) # type: ignore + try: + self.add(onboarding_device) + if self.job.debug: + self.job.logger.debug(f"Device: {self.device_data[ip_address]['hostname']} loaded.") + except diffsync.ObjectAlreadyExists: + self.job.logger.error( + f"Device: {self.device_data[ip_address]['hostname']} has already been loaded! " + f"Duplicate devices will not be synced. 
" + f"[Serial Number: {self.device_data[ip_address]['serial']}, " + f"IP Address: {ip_address}]" + ) + except KeyError as err: + self.job.logger.error(f"{ip_address}: Unable to load Device due to missing key in returned data, {err}") def load(self): """Load network data.""" self._validate_ip_addresses(self.job.ip_addresses) - self.execute_command_getter() - self.load_manufacturers() - self.load_platforms() - self.load_device_types() - self.load_devices() + # self.execute_command_getter() + data_type_check = self._check_data_type(self.device_data) + if data_type_check: + self.load_manufacturers() + self.load_platforms() + self.load_device_types() + self.load_devices() + else: + self.job.logger.error("Data returned from CommandGetter is not the correct type. " + " No devices will be onboarded, check the CommandGetter job logs.") diff --git a/nautobot_device_onboarding/diffsync/mock_data.py b/nautobot_device_onboarding/diffsync/mock_data.py index 1e9f7152..7741c502 100644 --- a/nautobot_device_onboarding/diffsync/mock_data.py +++ b/nautobot_device_onboarding/diffsync/mock_data.py @@ -176,11 +176,11 @@ "hostname": "demo-cisco-xe1", "serial": "9ABUXU581111", "device_type": "CSR1000V17", - "mgmt_interface": "GigabitEthernet20", + # "mgmt_interface": "GigabitEthernet20", "manufacturer": "Cisco", "platform": "IOS-test", "network_driver": "cisco_ios", - "mask_length": 16, + # "mask_length": 16, }, "10.1.1.10": { "hostname": "demo-cisco-xe2", diff --git a/poetry.lock b/poetry.lock index 77d263fd..e1e21631 100755 --- a/poetry.lock +++ b/poetry.lock @@ -1,10 +1,9 @@ -# This file is automatically @generated by Poetry 1.4.0 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. [[package]] name = "amqp" version = "5.2.0" description = "Low-level AMQP client for Python (fork of amqplib)." 
-category = "main" optional = false python-versions = ">=3.6" files = [ @@ -19,7 +18,6 @@ vine = ">=5.0.0,<6.0.0" name = "aniso8601" version = "7.0.0" description = "A library for parsing ISO 8601 strings." -category = "main" optional = false python-versions = "*" files = [ @@ -31,7 +29,6 @@ files = [ name = "anyio" version = "4.2.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -54,7 +51,6 @@ trio = ["trio (>=0.23)"] name = "asgiref" version = "3.7.2" description = "ASGI specs, helper code, and adapters" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -72,7 +68,6 @@ tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"] name = "astroid" version = "2.15.8" description = "An abstract syntax tree for Python with inference support." -category = "dev" optional = false python-versions = ">=3.7.2" files = [ @@ -92,7 +87,6 @@ wrapt = [ name = "async-timeout" version = "4.0.3" description = "Timeout context manager for asyncio programs" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -104,7 +98,6 @@ files = [ name = "attrs" version = "23.2.0" description = "Classes Without Boilerplate" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -124,7 +117,6 @@ tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "p name = "autopep8" version = "2.0.0" description = "A tool that automatically formats Python code to conform to the PEP 8 style guide" -category = "main" optional = false python-versions = "*" files = [ @@ -140,7 +132,6 @@ tomli = "*" name = "backports-zoneinfo" version = "0.2.1" description = "Backport of the standard library zoneinfo module" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -172,7 +163,6 @@ tzdata = ["tzdata"] name = "bandit" version = "1.7.7" description = "Security oriented static analyser for 
python code." -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -196,7 +186,6 @@ yaml = ["PyYAML"] name = "bcrypt" version = "4.1.2" description = "Modern password hashing for your software and your servers" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -237,7 +226,6 @@ typecheck = ["mypy"] name = "billiard" version = "4.2.0" description = "Python multiprocessing fork with improvements and bugfixes" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -249,7 +237,6 @@ files = [ name = "black" version = "24.2.0" description = "The uncompromising code formatter." -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -296,7 +283,6 @@ uvloop = ["uvloop (>=0.15.2)"] name = "celery" version = "5.3.6" description = "Distributed Task Queue." -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -353,7 +339,6 @@ zstd = ["zstandard (==0.22.0)"] name = "certifi" version = "2024.2.2" description = "Python package for providing Mozilla's CA Bundle." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -365,7 +350,6 @@ files = [ name = "cffi" version = "1.16.0" description = "Foreign Function Interface for Python calling C code." -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -430,7 +414,6 @@ pycparser = "*" name = "charset-normalizer" version = "3.3.2" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
-category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -530,7 +513,6 @@ files = [ name = "click" version = "8.1.7" description = "Composable command line interface toolkit" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -545,7 +527,6 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""} name = "click-didyoumean" version = "0.3.0" description = "Enables git-like *did-you-mean* feature in click" -category = "main" optional = false python-versions = ">=3.6.2,<4.0.0" files = [ @@ -560,7 +541,6 @@ click = ">=7" name = "click-plugins" version = "1.1.1" description = "An extension module for click to enable registering CLI commands via setuptools entry-points." -category = "main" optional = false python-versions = "*" files = [ @@ -578,7 +558,6 @@ dev = ["coveralls", "pytest (>=3.6)", "pytest-cov", "wheel"] name = "click-repl" version = "0.3.0" description = "REPL plugin for Click" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -597,7 +576,6 @@ testing = ["pytest (>=7.2.1)", "pytest-cov (>=4.0.0)", "tox (>=4.4.3)"] name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ @@ -609,7 +587,6 @@ files = [ name = "coverage" version = "7.4.1" description = "Code coverage measurement for Python" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -674,7 +651,6 @@ toml = ["tomli"] name = "cron-descriptor" version = "1.4.3" description = "A Python library that converts cron expressions into human readable strings." -category = "main" optional = false python-versions = "*" files = [ @@ -689,7 +665,6 @@ dev = ["polib"] name = "cryptography" version = "42.0.2" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
-category = "main" optional = false python-versions = ">=3.7" files = [ @@ -744,7 +719,6 @@ test-randomorder = ["pytest-randomly"] name = "deepdiff" version = "6.7.1" description = "Deep Difference and Search of any Python object/data. Recreate objects by adding adding deltas to each other." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -763,7 +737,6 @@ optimize = ["orjson"] name = "defusedxml" version = "0.7.1" description = "XML bomb protection for Python stdlib modules" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -775,7 +748,6 @@ files = [ name = "diffsync" version = "1.10.0" description = "Library to easily sync/diff/update 2 different data sources" -category = "main" optional = false python-versions = ">=3.8,<4.0" files = [ @@ -797,7 +769,6 @@ redis = ["redis (>=4.3,<5.0)"] name = "dill" version = "0.3.8" description = "serialize all of Python" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -813,7 +784,6 @@ profile = ["gprof2dot (>=2022.7.29)"] name = "django" version = "3.2.24" description = "A high-level Python Web framework that encourages rapid development and clean, pragmatic design." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -834,7 +804,6 @@ bcrypt = ["bcrypt"] name = "django-ajax-tables" version = "1.1.1" description = "Django tag for ajax-enabled tables" -category = "main" optional = false python-versions = "*" files = [ @@ -846,7 +815,6 @@ files = [ name = "django-celery-beat" version = "2.5.0" description = "Database-backed Periodic Tasks." -category = "main" optional = false python-versions = "*" files = [ @@ -867,7 +835,6 @@ tzdata = "*" name = "django-celery-results" version = "2.4.0" description = "Celery result backends for Django." 
-category = "main" optional = false python-versions = "*" files = [ @@ -882,7 +849,6 @@ celery = ">=5.2.3,<6.0" name = "django-constance" version = "2.9.1" description = "Django live settings with pluggable backends, including Redis." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -901,7 +867,6 @@ redis = ["redis"] name = "django-cors-headers" version = "4.2.0" description = "django-cors-headers is a Django application for handling the server headers required for Cross-Origin Resource Sharing (CORS)." -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -916,7 +881,6 @@ Django = ">=3.2" name = "django-db-file-storage" version = "0.5.6.1" description = "Custom FILE_STORAGE for Django. Saves files in your database instead of your file system." -category = "main" optional = false python-versions = "*" files = [ @@ -931,7 +895,6 @@ Django = "*" name = "django-debug-toolbar" version = "4.3.0" description = "A configurable set of panels that display various debug information about the current request/response." -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -947,7 +910,6 @@ sqlparse = ">=0.2" name = "django-extensions" version = "3.2.3" description = "Extensions for Django" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -962,7 +924,6 @@ Django = ">=3.2" name = "django-filter" version = "23.1" description = "Django-filter is a reusable Django application for allowing users to filter querysets dynamically." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -977,7 +938,6 @@ Django = ">=3.2" name = "django-health-check" version = "3.17.0" description = "Run checks on services like databases, queue servers, celery processes, etc." 
-category = "main" optional = false python-versions = ">=3.8" files = [ @@ -996,7 +956,6 @@ test = ["celery", "pytest", "pytest-cov", "pytest-django", "redis"] name = "django-jinja" version = "2.10.2" description = "Jinja2 templating language integrated in Django." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1012,7 +971,6 @@ jinja2 = ">=3" name = "django-picklefield" version = "3.1" description = "Pickled object field for Django" -category = "main" optional = false python-versions = ">=3" files = [ @@ -1030,7 +988,6 @@ tests = ["tox"] name = "django-prometheus" version = "2.3.1" description = "Django middlewares to monitor your application with Prometheus.io." -category = "main" optional = false python-versions = "*" files = [ @@ -1045,7 +1002,6 @@ prometheus-client = ">=0.7" name = "django-redis" version = "5.3.0" description = "Full featured redis cache backend for Django." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1064,7 +1020,6 @@ hiredis = ["redis[hiredis] (>=3,!=4.0.0,!=4.0.1)"] name = "django-silk" version = "5.1.0" description = "Silky smooth profiling for the Django Framework" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1082,7 +1037,6 @@ sqlparse = "*" name = "django-tables2" version = "2.6.0" description = "Table/data-grid framework for Django" -category = "main" optional = false python-versions = "*" files = [ @@ -1100,7 +1054,6 @@ tablib = ["tablib"] name = "django-taggit" version = "4.0.0" description = "django-taggit is a reusable Django application for simple tagging." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1115,7 +1068,6 @@ Django = ">=3.2" name = "django-timezone-field" version = "5.1" description = "A Django app providing DB, form, and REST framework fields for zoneinfo and pytz timezone objects." 
-category = "main" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -1125,14 +1077,13 @@ files = [ [package.dependencies] "backports.zoneinfo" = {version = ">=0.2.1,<0.3.0", markers = "python_version < \"3.9\""} -Django = ">=2.2,<3.0.0 || >=3.2.0,<5.0" +Django = ">=2.2,<3.0.dev0 || >=3.2.dev0,<5.0" pytz = "*" [[package]] name = "django-tree-queries" version = "0.16.1" description = "Tree queries with explicit opt-in, without configurability" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1147,7 +1098,6 @@ tests = ["coverage"] name = "django-webserver" version = "1.2.0" description = "Django management commands for production webservers" -category = "main" optional = false python-versions = "*" files = [ @@ -1169,7 +1119,6 @@ waitress = ["waitress"] name = "djangorestframework" version = "3.14.0" description = "Web APIs for Django, made easy." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1185,7 +1134,6 @@ pytz = "*" name = "drf-react-template-framework" version = "0.0.17" description = "Django REST Framework plugin that creates form schemas for react-jsonschema-form" -category = "main" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -1200,7 +1148,6 @@ djangorestframework = ">=3.12.0,<4.0.0" name = "drf-spectacular" version = "0.26.3" description = "Sane and flexible OpenAPI 3 schema generation for Django REST framework" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1225,7 +1172,6 @@ sidecar = ["drf-spectacular-sidecar"] name = "drf-spectacular-sidecar" version = "2024.2.1" description = "Serve self-contained distribution builds of Swagger UI and Redoc with Django" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1240,7 +1186,6 @@ Django = ">=2.2" name = "emoji" version = "2.8.0" description = "Emoji for Python" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ 
-1255,7 +1200,6 @@ dev = ["coverage", "coveralls", "pytest"] name = "exceptiongroup" version = "1.2.0" description = "Backport of PEP 654 (exception groups)" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1270,7 +1214,6 @@ test = ["pytest (>=6)"] name = "flake8" version = "5.0.4" description = "the modular source code checker: pep8 pyflakes and co" -category = "dev" optional = false python-versions = ">=3.6.1" files = [ @@ -1287,7 +1230,6 @@ pyflakes = ">=2.5.0,<2.6.0" name = "future" version = "0.18.3" description = "Clean single-source support for Python 3 and 2" -category = "main" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -1298,7 +1240,6 @@ files = [ name = "ghp-import" version = "2.1.0" description = "Copy your docs directly to the gh-pages branch." -category = "dev" optional = false python-versions = "*" files = [ @@ -1316,7 +1257,6 @@ dev = ["flake8", "markdown", "twine", "wheel"] name = "gitdb" version = "4.0.11" description = "Git Object Database" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1331,7 +1271,6 @@ smmap = ">=3.0.1,<6" name = "gitpython" version = "3.1.41" description = "GitPython is a Python library used to interact with Git repositories" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1349,7 +1288,6 @@ test = ["black", "coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mock", "mypy", "pre name = "gprof2dot" version = "2022.7.29" description = "Generate a dot graph from the output of several profilers." 
-category = "main" optional = false python-versions = ">=2.7" files = [ @@ -1361,7 +1299,6 @@ files = [ name = "graphene" version = "2.1.9" description = "GraphQL Framework for Python" -category = "main" optional = false python-versions = "*" files = [ @@ -1384,7 +1321,6 @@ test = ["coveralls", "fastdiff (==0.2.0)", "iso8601", "mock", "promise", "pytest name = "graphene-django" version = "2.16.0" description = "Graphene Django integration" -category = "main" optional = false python-versions = "*" files = [ @@ -1409,7 +1345,6 @@ test = ["coveralls", "django-filter (>=2)", "djangorestframework (>=3.6.3)", "mo name = "graphene-django-optimizer" version = "0.8.0" description = "Optimize database access inside graphene queries." -category = "main" optional = false python-versions = "*" files = [ @@ -1420,7 +1355,6 @@ files = [ name = "graphql-core" version = "2.3.2" description = "GraphQL implementation for Python" -category = "main" optional = false python-versions = "*" files = [ @@ -1441,7 +1375,6 @@ test = ["coveralls (==1.11.1)", "cython (==0.29.17)", "gevent (==1.5.0)", "pyann name = "graphql-relay" version = "2.0.1" description = "Relay implementation for Python" -category = "main" optional = false python-versions = "*" files = [ @@ -1458,7 +1391,6 @@ six = ">=1.12" name = "griffe" version = "0.40.1" description = "Signatures for entire Python programs. Extract the structure, the frame, the skeleton of your project, to generate API documentation or find breaking changes in your API." -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1473,7 +1405,6 @@ colorama = ">=0.4" name = "h11" version = "0.14.0" description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1485,7 +1416,6 @@ files = [ name = "httpcore" version = "0.17.3" description = "A minimal low-level HTTP client." 
-category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1497,17 +1427,16 @@ files = [ anyio = ">=3.0,<5.0" certifi = "*" h11 = ">=0.13,<0.15" -sniffio = ">=1.0.0,<2.0.0" +sniffio = "==1.*" [package.extras] http2 = ["h2 (>=3,<5)"] -socks = ["socksio (>=1.0.0,<2.0.0)"] +socks = ["socksio (==1.*)"] [[package]] name = "httpx" version = "0.24.1" description = "The next generation HTTP client." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1523,15 +1452,14 @@ sniffio = "*" [package.extras] brotli = ["brotli", "brotlicffi"] -cli = ["click (>=8.0.0,<9.0.0)", "pygments (>=2.0.0,<3.0.0)", "rich (>=10,<14)"] +cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] http2 = ["h2 (>=3,<5)"] -socks = ["socksio (>=1.0.0,<2.0.0)"] +socks = ["socksio (==1.*)"] [[package]] name = "idna" version = "3.6" description = "Internationalized Domain Names in Applications (IDNA)" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -1543,7 +1471,6 @@ files = [ name = "importlib-metadata" version = "4.13.0" description = "Read metadata from Python packages" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1563,7 +1490,6 @@ testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packag name = "importlib-resources" version = "5.13.0" description = "Read resources from Python packages" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1582,7 +1508,6 @@ testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", name = "inflection" version = "0.5.1" description = "A port of Ruby on Rails inflector to Python" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -1594,7 +1519,6 @@ files = [ name = "invoke" version = "2.2.0" description = "Pythonic task execution" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1606,7 +1530,6 @@ files = [ name = "isort" version = "5.13.2" description 
= "A Python utility / library to sort Python imports." -category = "dev" optional = false python-versions = ">=3.8.0" files = [ @@ -1621,7 +1544,6 @@ colors = ["colorama (>=0.4.6)"] name = "jdiff" version = "0.0.6" description = "A light-weight library to compare structured output from network devices show commands." -category = "main" optional = false python-versions = ">=3.8,<4.0" files = [ @@ -1637,7 +1559,6 @@ jmespath = ">=1.0.1,<2.0.0" name = "jinja2" version = "3.1.3" description = "A very fast and expressive template engine." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1655,7 +1576,6 @@ i18n = ["Babel (>=2.7)"] name = "jmespath" version = "1.0.1" description = "JSON Matching Expressions" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1667,7 +1587,6 @@ files = [ name = "jsonschema" version = "4.18.6" description = "An implementation of JSON Schema validation for Python" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1691,7 +1610,6 @@ format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339- name = "jsonschema-specifications" version = "2023.12.1" description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1707,7 +1625,6 @@ referencing = ">=0.31.0" name = "junos-eznc" version = "2.7.0" description = "Junos 'EZ' automation for non-programmers" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1732,7 +1649,6 @@ yamlordereddictloader = "*" name = "kombu" version = "5.3.5" description = "Messaging library for Python." -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1767,7 +1683,6 @@ zookeeper = ["kazoo (>=2.8.0)"] name = "lazy-object-proxy" version = "1.10.0" description = "A fast and thorough lazy object proxy." 
-category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1814,7 +1729,6 @@ files = [ name = "lxml" version = "5.1.0" description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1908,7 +1822,6 @@ source = ["Cython (>=3.0.7)"] name = "markdown" version = "3.3.7" description = "Python implementation of Markdown." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1926,7 +1839,6 @@ testing = ["coverage", "pyyaml"] name = "markdown-it-py" version = "3.0.0" description = "Python port of markdown-it. Markdown parsing, done right!" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1951,7 +1863,6 @@ testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] name = "markupsafe" version = "2.1.5" description = "Safely add untrusted strings to HTML/XML markup." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2021,7 +1932,6 @@ files = [ name = "mccabe" version = "0.7.0" description = "McCabe checker, plugin for flake8" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2033,7 +1943,6 @@ files = [ name = "mdurl" version = "0.1.2" description = "Markdown URL utilities" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2045,7 +1954,6 @@ files = [ name = "mergedeep" version = "1.3.4" description = "A deep merge function for 🐍." -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2057,7 +1965,6 @@ files = [ name = "mkdocs" version = "1.5.2" description = "Project documentation with Markdown." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2089,7 +1996,6 @@ min-versions = ["babel (==2.9.0)", "click (==7.0)", "colorama (==0.4)", "ghp-imp name = "mkdocs-autorefs" version = "0.5.0" description = "Automatically link across pages in MkDocs." 
-category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2105,7 +2011,6 @@ mkdocs = ">=1.1" name = "mkdocs-material" version = "9.1.15" description = "Documentation that simply works" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2128,7 +2033,6 @@ requests = ">=2.26" name = "mkdocs-material-extensions" version = "1.3.1" description = "Extension pack for Python Markdown and MkDocs Material." -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2140,7 +2044,6 @@ files = [ name = "mkdocs-version-annotations" version = "1.0.0" description = "MkDocs plugin to add custom admonitions for documenting version differences" -category = "dev" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -2152,7 +2055,6 @@ files = [ name = "mkdocstrings" version = "0.22.0" description = "Automatic documentation from sources, for MkDocs." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2179,7 +2081,6 @@ python-legacy = ["mkdocstrings-python-legacy (>=0.2.1)"] name = "mkdocstrings-python" version = "1.5.2" description = "A Python handler for mkdocstrings." -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2195,7 +2096,6 @@ mkdocstrings = ">=0.20" name = "mypy-extensions" version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -2207,7 +2107,6 @@ files = [ name = "napalm" version = "4.1.0" description = "Network Automation and Programmability Abstraction Layer with Multivendor support" -category = "main" optional = false python-versions = "*" files = [ @@ -2240,7 +2139,6 @@ typing-extensions = ">=4.3.0" name = "nautobot" version = "2.1.4" description = "Source of truth and network automation platform." 
-category = "main" optional = false python-versions = ">=3.8,<3.12" files = [ @@ -2305,7 +2203,6 @@ sso = ["social-auth-core[openidconnect,saml] (>=4.4.2,<4.5.0)"] name = "nautobot-plugin-nornir" version = "2.0.0" description = "Nautobot Nornir plugin." -category = "main" optional = false python-versions = ">=3.8,<3.12" files = [ @@ -2324,7 +2221,6 @@ nautobot = ["nautobot (>=2.0.0,<3.0.0)"] name = "nautobot-ssot" version = "2.2.0" description = "Nautobot Single Source of Truth" -category = "main" optional = false python-versions = ">=3.8,<3.12" files = [ @@ -2355,7 +2251,6 @@ servicenow = ["Jinja2 (>=2.11.3)", "PyYAML (>=6)", "ijson (>=2.5.1)", "oauthlib name = "ncclient" version = "0.6.15" description = "Python library for NETCONF clients" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -2372,7 +2267,6 @@ six = "*" name = "netaddr" version = "0.8.0" description = "A network address manipulation library for Python" -category = "main" optional = false python-versions = "*" files = [ @@ -2384,7 +2278,6 @@ files = [ name = "netmiko" version = "4.3.0" description = "Multi-vendor library to simplify legacy CLI connections to network devices" -category = "main" optional = false python-versions = ">=3.8,<4.0" files = [ @@ -2404,7 +2297,6 @@ textfsm = ">=1.1.3" name = "netutils" version = "1.6.0" description = "Common helper functions useful in network automation." -category = "main" optional = false python-versions = ">=3.8,<4.0" files = [ @@ -2419,7 +2311,6 @@ optionals = ["jsonschema (>=4.17.3,<5.0.0)", "napalm (>=4.0.0,<5.0.0)"] name = "nh3" version = "0.2.15" description = "Python bindings to the ammonia HTML sanitization library." 
-category = "main" optional = false python-versions = "*" files = [ @@ -2445,7 +2336,6 @@ files = [ name = "nornir" version = "3.4.1" description = "Pluggable multi-threaded framework with inventory management to help operate collections of devices" -category = "main" optional = false python-versions = ">=3.8,<4.0" files = [ @@ -2462,7 +2352,6 @@ mypy_extensions = ">=1.0.0,<2.0.0" name = "nornir-jinja2" version = "0.2.0" description = "Jinja2 plugins for nornir" -category = "main" optional = false python-versions = ">=3.6,<4.0" files = [ @@ -2478,7 +2367,6 @@ nornir = ">=3,<4" name = "nornir-napalm" version = "0.4.0" description = "NAPALM's plugins for nornir" -category = "main" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -2494,7 +2382,6 @@ nornir = ">=3,<4" name = "nornir-nautobot" version = "3.1.1" description = "Nornir Nautobot" -category = "main" optional = false python-versions = ">=3.8,<4.0" files = [ @@ -2520,7 +2407,6 @@ mikrotik-driver = ["routeros-api (>=0.17.0,<0.18.0)"] name = "nornir-netmiko" version = "1.0.1" description = "Netmiko's plugins for Nornir" -category = "main" optional = false python-versions = ">=3.8,<4.0" files = [ @@ -2535,7 +2421,6 @@ netmiko = ">=4.0.0,<5.0.0" name = "nornir-utils" version = "0.2.0" description = "Collection of plugins and functions for nornir that don't require external dependencies" -category = "main" optional = false python-versions = ">=3.6.2,<4.0.0" files = [ @@ -2551,7 +2436,6 @@ nornir = ">=3,<4" name = "ntc-templates" version = "4.3.0" description = "TextFSM Templates for Network Devices, and Python wrapper for TextFSM's CliTable." 
-category = "main" optional = false python-versions = ">=3.8,<4.0" files = [ @@ -2566,7 +2450,6 @@ textfsm = ">=1.1.0,<2.0.0" name = "oauthlib" version = "3.2.2" description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -2583,7 +2466,6 @@ signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] name = "ordered-set" version = "4.1.0" description = "An OrderedSet is a custom MutableSet that remembers its order, so that every" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2598,7 +2480,6 @@ dev = ["black", "mypy", "pytest"] name = "packaging" version = "23.2" description = "Core utilities for Python packages" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2610,7 +2491,6 @@ files = [ name = "paramiko" version = "3.4.0" description = "SSH2 protocol library" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -2632,7 +2512,6 @@ invoke = ["invoke (>=2.0)"] name = "pathspec" version = "0.12.1" description = "Utility library for gitignore style pattern matching of file paths." -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2644,7 +2523,6 @@ files = [ name = "pbr" version = "6.0.0" description = "Python Build Reasonableness" -category = "dev" optional = false python-versions = ">=2.6" files = [ @@ -2656,7 +2534,6 @@ files = [ name = "pillow" version = "10.2.0" description = "Python Imaging Library (Fork)" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2742,7 +2619,6 @@ xmp = ["defusedxml"] name = "pkgutil-resolve-name" version = "1.3.10" description = "Resolve a name to an object." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -2754,7 +2630,6 @@ files = [ name = "platformdirs" version = "4.2.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. 
a \"user data dir\"." -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2770,7 +2645,6 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest- name = "prometheus-client" version = "0.17.1" description = "Python client for the Prometheus monitoring system." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -2785,7 +2659,6 @@ twisted = ["twisted"] name = "promise" version = "2.3" description = "Promises/A+ implementation for Python" -category = "main" optional = false python-versions = "*" files = [ @@ -2802,7 +2675,6 @@ test = ["coveralls", "futures", "mock", "pytest (>=2.7.3)", "pytest-benchmark", name = "prompt-toolkit" version = "3.0.43" description = "Library for building powerful interactive command lines in Python" -category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -2817,7 +2689,6 @@ wcwidth = "*" name = "psycopg2-binary" version = "2.9.9" description = "psycopg2 - Python-PostgreSQL Database Adapter" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2899,7 +2770,6 @@ files = [ name = "pycodestyle" version = "2.9.1" description = "Python style guide checker" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -2911,7 +2781,6 @@ files = [ name = "pycparser" version = "2.21" description = "C parser in Python" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -2923,7 +2792,6 @@ files = [ name = "pydantic" version = "1.10.14" description = "Data validation and settings management using python type hints" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2976,7 +2844,6 @@ email = ["email-validator (>=1.0.3)"] name = "pydocstyle" version = "6.3.0" description = "Python docstring style checker" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2994,7 +2861,6 @@ toml = ["tomli (>=1.2.3)"] name = "pyeapi" 
version = "1.0.2" description = "Python Client for eAPI" -category = "main" optional = false python-versions = "*" files = [ @@ -3012,7 +2878,6 @@ test = ["coverage", "mock"] name = "pyflakes" version = "2.5.0" description = "passive checker of Python programs" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -3024,7 +2889,6 @@ files = [ name = "pygments" version = "2.17.2" description = "Pygments is a syntax highlighting package written in Python." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3040,7 +2904,6 @@ windows-terminal = ["colorama (>=0.4.6)"] name = "pyjwt" version = "2.8.0" description = "JSON Web Token implementation in Python" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3058,7 +2921,6 @@ tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] name = "pylint" version = "2.17.7" description = "python code static checker" -category = "dev" optional = false python-versions = ">=3.7.2" files = [ @@ -3088,7 +2950,6 @@ testutils = ["gitpython (>3)"] name = "pylint-django" version = "2.5.5" description = "A Pylint plugin to help Pylint understand the Django web framework" -category = "dev" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -3107,7 +2968,6 @@ with-django = ["Django (>=2.2)"] name = "pylint-nautobot" version = "0.2.1" description = "Custom Pylint Rules for Nautobot" -category = "dev" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -3125,7 +2985,6 @@ tomli = ">=2.0.1,<3.0.0" name = "pylint-plugin-utils" version = "0.8.2" description = "Utilities and helpers for writing Pylint plugins" -category = "dev" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -3140,7 +2999,6 @@ pylint = ">=1.7" name = "pymdown-extensions" version = "10.4" description = "Extension pack for Python Markdown." 
-category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -3159,7 +3017,6 @@ extra = ["pygments (>=2.12)"] name = "pynacl" version = "1.5.0" description = "Python binding to the Networking and Cryptography (NaCl) library" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -3186,7 +3043,6 @@ tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"] name = "pynautobot" version = "2.0.2" description = "Nautobot API client library" -category = "main" optional = false python-versions = ">=3.8,<4.0" files = [ @@ -3203,7 +3059,6 @@ urllib3 = ">=1.21.1,<1.27" name = "pyparsing" version = "3.1.1" description = "pyparsing module - Classes and methods to define and execute parsing grammars" -category = "main" optional = false python-versions = ">=3.6.8" files = [ @@ -3218,7 +3073,6 @@ diagrams = ["jinja2", "railroad-diagrams"] name = "pyserial" version = "3.5" description = "Python Serial Port Extension" -category = "main" optional = false python-versions = "*" files = [ @@ -3233,7 +3087,6 @@ cp2110 = ["hidapi"] name = "python-crontab" version = "3.0.0" description = "Python Crontab API" -category = "main" optional = false python-versions = "*" files = [ @@ -3252,7 +3105,6 @@ cron-schedule = ["croniter"] name = "python-dateutil" version = "2.8.2" description = "Extensions to the standard Python datetime module" -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ @@ -3267,7 +3119,6 @@ six = ">=1.5" name = "python-slugify" version = "8.0.4" description = "A Python slugify application that also handles Unicode" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3285,7 +3136,6 @@ unidecode = ["Unidecode (>=1.1.1)"] name = "python3-openid" version = "3.2.0" description = "OpenID support for modern servers and consumers." 
-category = "main" optional = false python-versions = "*" files = [ @@ -3304,7 +3154,6 @@ postgresql = ["psycopg2"] name = "pytz" version = "2024.1" description = "World timezone definitions, modern and historical" -category = "main" optional = false python-versions = "*" files = [ @@ -3316,7 +3165,6 @@ files = [ name = "pyuwsgi" version = "2.0.23.post0" description = "The uWSGI server" -category = "main" optional = false python-versions = "*" files = [ @@ -3369,7 +3217,6 @@ files = [ name = "pyyaml" version = "6.0.1" description = "YAML parser and emitter for Python" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -3429,7 +3276,6 @@ files = [ name = "pyyaml-env-tag" version = "0.1" description = "A custom YAML tag for referencing environment variables in YAML files. " -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -3444,7 +3290,6 @@ pyyaml = "*" name = "redis" version = "5.0.1" description = "Python client for Redis database and key-value store" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3463,7 +3308,6 @@ ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==20.0.1)", "requests (>=2.26.0)" name = "referencing" version = "0.33.0" description = "JSON Referencing + Python" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -3479,7 +3323,6 @@ rpds-py = ">=0.7.0" name = "regex" version = "2023.12.25" description = "Alternative regular expression module, to replace re." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3582,7 +3425,6 @@ files = [ name = "requests" version = "2.31.0" description = "Python HTTP for Humans." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3604,7 +3446,6 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] name = "requests-oauthlib" version = "1.3.1" description = "OAuthlib authentication support for Requests." 
-category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -3623,7 +3464,6 @@ rsa = ["oauthlib[signedtoken] (>=3.0.0)"] name = "rich" version = "13.7.0" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" -category = "dev" optional = false python-versions = ">=3.7.0" files = [ @@ -3643,7 +3483,6 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"] name = "rpds-py" version = "0.18.0" description = "Python bindings to Rust's persistent data structures (rpds)" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -3752,7 +3591,6 @@ files = [ name = "ruamel-yaml" version = "0.18.6" description = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3771,7 +3609,6 @@ jinja2 = ["ruamel.yaml.jinja2 (>=0.2)"] name = "ruamel-yaml-clib" version = "0.2.8" description = "C version of reader, parser and emitter for ruamel.yaml derived from libyaml" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -3831,7 +3668,6 @@ files = [ name = "rx" version = "1.6.3" description = "Reactive Extensions (Rx) for Python" -category = "main" optional = false python-versions = "*" files = [ @@ -3842,7 +3678,6 @@ files = [ name = "scp" version = "0.14.5" description = "scp module for paramiko" -category = "main" optional = false python-versions = "*" files = [ @@ -3857,7 +3692,6 @@ paramiko = "*" name = "setuptools" version = "69.1.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -3874,7 +3708,6 @@ testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jar name = "singledispatch" version = "4.1.0" description = "Backport functools.singledispatch to older Pythons." 
-category = "main" optional = false python-versions = ">=3.8" files = [ @@ -3890,7 +3723,6 @@ testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", name = "six" version = "1.16.0" description = "Python 2 and 3 compatibility utilities" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -3902,7 +3734,6 @@ files = [ name = "smmap" version = "5.0.1" description = "A pure Python implementation of a sliding window memory map manager" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3914,7 +3745,6 @@ files = [ name = "sniffio" version = "1.3.0" description = "Sniff out which async library your code is running under" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3926,7 +3756,6 @@ files = [ name = "snowballstemmer" version = "2.2.0" description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." -category = "dev" optional = false python-versions = "*" files = [ @@ -3938,7 +3767,6 @@ files = [ name = "social-auth-app-django" version = "5.2.0" description = "Python Social Authentication, Django integration." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3954,7 +3782,6 @@ social-auth-core = ">=4.4.1" name = "social-auth-core" version = "4.5.3" description = "Python social authentication made simple." -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -3981,7 +3808,6 @@ saml = ["python3-saml (>=1.5.0)"] name = "sqlparse" version = "0.4.4" description = "A non-validating SQL parser." 
-category = "main" optional = false python-versions = ">=3.5" files = [ @@ -3998,7 +3824,6 @@ test = ["pytest", "pytest-cov"] name = "stevedore" version = "5.1.0" description = "Manage dynamic plugins for Python applications" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -4013,7 +3838,6 @@ pbr = ">=2.0.0,<2.1.0 || >2.1.0" name = "structlog" version = "22.3.0" description = "Structured Logging for Python" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4031,7 +3855,6 @@ typing = ["mypy", "rich", "twisted"] name = "svgwrite" version = "1.4.3" description = "A Python library to create SVG drawings." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -4043,7 +3866,6 @@ files = [ name = "text-unidecode" version = "1.3" description = "The most basic Text::Unidecode port" -category = "main" optional = false python-versions = "*" files = [ @@ -4055,7 +3877,6 @@ files = [ name = "textfsm" version = "1.1.3" description = "Python module for parsing semi-structured text into python tables." -category = "main" optional = false python-versions = "*" files = [ @@ -4071,7 +3892,6 @@ six = "*" name = "toml" version = "0.10.2" description = "Python Library for Tom's Obvious, Minimal Language" -category = "dev" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -4083,7 +3903,6 @@ files = [ name = "tomli" version = "2.0.1" description = "A lil' TOML parser" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4095,7 +3914,6 @@ files = [ name = "tomlkit" version = "0.12.3" description = "Style preserving TOML library" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -4107,7 +3925,6 @@ files = [ name = "transitions" version = "0.9.0" description = "A lightweight, object-oriented Python state machine implementation with many extensions." 
-category = "main" optional = false python-versions = "*" files = [ @@ -4126,7 +3943,6 @@ test = ["pytest"] name = "ttp" version = "0.9.5" description = "Template Text Parser" -category = "main" optional = false python-versions = ">=2.7,<4.0" files = [ @@ -4142,7 +3958,6 @@ full = ["cerberus (>=1.3.0,<1.4.0)", "deepdiff (>=5.8.0,<5.9.0)", "jinja2 (>=3.0 name = "ttp-templates" version = "0.3.6" description = "Template Text Parser Templates collections" -category = "main" optional = false python-versions = ">=3.6,<4.0" files = [ @@ -4160,7 +3975,6 @@ docs = ["mkdocs (==1.2.4)", "mkdocs-material (==7.2.2)", "mkdocs-material-extens name = "typing-extensions" version = "4.9.0" description = "Backported and Experimental Type Hints for Python 3.8+" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -4172,7 +3986,6 @@ files = [ name = "tzdata" version = "2024.1" description = "Provider of IANA time zone data" -category = "main" optional = false python-versions = ">=2" files = [ @@ -4184,7 +3997,6 @@ files = [ name = "uritemplate" version = "4.1.1" description = "Implementation of RFC 6570 URI Templates" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -4196,7 +4008,6 @@ files = [ name = "urllib3" version = "1.26.18" description = "HTTP library with thread-safe connection pooling, file post, and more." -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ @@ -4213,7 +4024,6 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] name = "vine" version = "5.1.0" description = "Python promises." 
-category = "main" optional = false python-versions = ">=3.6" files = [ @@ -4225,7 +4035,6 @@ files = [ name = "watchdog" version = "4.0.0" description = "Filesystem events monitoring" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -4267,7 +4076,6 @@ watchmedo = ["PyYAML (>=3.10)"] name = "wcwidth" version = "0.2.13" description = "Measures the displayed width of unicode strings in a terminal" -category = "main" optional = false python-versions = "*" files = [ @@ -4279,7 +4087,6 @@ files = [ name = "wrapt" version = "1.16.0" description = "Module for decorators, wrappers and monkey patching." -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -4359,7 +4166,6 @@ files = [ name = "yamllint" version = "1.34.0" description = "A linter for YAML files." -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -4378,7 +4184,6 @@ dev = ["doc8", "flake8", "flake8-import-order", "rstcheck[sphinx]", "sphinx"] name = "yamlordereddictloader" version = "0.4.2" description = "YAML loader and dumper for PyYAML allowing to keep keys order." 
-category = "main" optional = false python-versions = "*" files = [ @@ -4393,7 +4198,6 @@ pyyaml = "*" name = "zipp" version = "3.17.0" description = "Backport of pathlib-compatible object wrapper for zip files" -category = "main" optional = false python-versions = ">=3.8" files = [ From 15e8515c30cac20e9baa618de2d526f6297b7896 Mon Sep 17 00:00:00 2001 From: Jeff Kala Date: Fri, 16 Feb 2024 21:48:49 -0600 Subject: [PATCH 074/225] fix format of yaml files --- nautobot_device_onboarding/command_mappers/arista_eos.yml | 4 ++-- nautobot_device_onboarding/command_mappers/cisco_nxos.yml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/nautobot_device_onboarding/command_mappers/arista_eos.yml b/nautobot_device_onboarding/command_mappers/arista_eos.yml index fb2f75d0..093f9982 100755 --- a/nautobot_device_onboarding/command_mappers/arista_eos.yml +++ b/nautobot_device_onboarding/command_mappers/arista_eos.yml @@ -1,9 +1,9 @@ --- device_onboarding: - use_textfsm: true + use_textfsm: false hostname: + command: "show hostname | json" jpath: "[*].hostname" - command: "show hostname" serial: command: "show version" jpath: "[*].serial_number" diff --git a/nautobot_device_onboarding/command_mappers/cisco_nxos.yml b/nautobot_device_onboarding/command_mappers/cisco_nxos.yml index e3e9aa1c..2082e146 100755 --- a/nautobot_device_onboarding/command_mappers/cisco_nxos.yml +++ b/nautobot_device_onboarding/command_mappers/cisco_nxos.yml @@ -2,8 +2,8 @@ device_onboarding: use_textfsm: true hostname: - jpath: "[*].hostname" command: "show version" + jpath: "[*].hostname" serial: command: "show version" jpath: "[*].serial" From b790c279fa800fa51ed0ad75d63317c9041015f1 Mon Sep 17 00:00:00 2001 From: Jeff Kala Date: Fri, 16 Feb 2024 23:20:05 -0600 Subject: [PATCH 075/225] new post processor feature from yaml , clean ups --- .../command_mappers/arista_eos.yml | 5 ++- .../command_mappers/cisco_ios.yml | 1 + .../adapters/network_importer_adapters.py | 4 +- 
.../diffsync/adapters/onboarding_adapters.py | 11 ++--- .../models/network_importer_models.py | 8 +++- .../diffsync/models/onboarding_models.py | 8 +++- nautobot_device_onboarding/jobs.py | 41 +++++++++++++++---- nautobot_device_onboarding/nautobot_keeper.py | 9 +++- .../nornir_plays/processor.py | 5 ++- .../tests/test_nautobot_keeper.py | 10 ++++- nautobot_device_onboarding/utils/formatter.py | 38 ++++++++--------- nautobot_device_onboarding/utils/helper.py | 9 ++-- .../utils/inventory_creator.py | 14 +++++-- 13 files changed, 110 insertions(+), 53 deletions(-) diff --git a/nautobot_device_onboarding/command_mappers/arista_eos.yml b/nautobot_device_onboarding/command_mappers/arista_eos.yml index 093f9982..739f7857 100755 --- a/nautobot_device_onboarding/command_mappers/arista_eos.yml +++ b/nautobot_device_onboarding/command_mappers/arista_eos.yml @@ -1,8 +1,8 @@ --- device_onboarding: - use_textfsm: false + use_textfsm: true hostname: - command: "show hostname | json" + command: "show hostname" jpath: "[*].hostname" serial: command: "show version" @@ -16,3 +16,4 @@ device_onboarding: mask_length: command: "show ip interface brief" jpath: "[?interface=='Management1'].ip_address" + post_processor: "{{ obj[0] | ipaddress_interface('netmask') | netmask_to_cidr }}" diff --git a/nautobot_device_onboarding/command_mappers/cisco_ios.yml b/nautobot_device_onboarding/command_mappers/cisco_ios.yml index 5ec58cd8..66449123 100755 --- a/nautobot_device_onboarding/command_mappers/cisco_ios.yml +++ b/nautobot_device_onboarding/command_mappers/cisco_ios.yml @@ -4,6 +4,7 @@ device_onboarding: hostname: jpath: "[*].hostname" command: "show version" + post_processor: "{{ obj[0] | upper }}" serial: command: "show version" jpath: "[*].serial[0]" diff --git a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py index 9b87eb60..817ef323 100644 --- 
a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py @@ -2,6 +2,8 @@ import time +import diffsync +from diffsync.enum import DiffSyncModelFlags from nautobot.apps.choices import JobResultStatusChoices from nautobot.dcim.models import Interface from nautobot.extras.models import Job, JobResult @@ -9,8 +11,6 @@ from nautobot_ssot.contrib import NautobotAdapter from netaddr import EUI, mac_unix_expanded -import diffsync -from diffsync.enum import DiffSyncModelFlags from nautobot_device_onboarding.diffsync.models import network_importer_models diff --git a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py index 2618c1f5..3ffb3d79 100644 --- a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py @@ -2,12 +2,12 @@ import time +import diffsync import netaddr from nautobot.apps.choices import JobResultStatusChoices from nautobot.dcim.models import Device, DeviceType, Manufacturer, Platform from nautobot.extras.models import Job, JobResult -import diffsync from nautobot_device_onboarding.diffsync.models import onboarding_models @@ -186,11 +186,10 @@ def execute_command_getter(self): def _check_data_type(self, data): """Verify the data returned from CommandGetter is not a string.""" data_type_check_result = True - if type(data) == str: + if isinstance(data, str): data_type_check_result = False return data_type_check_result - def load_manufacturers(self): """Load manufacturers into the DiffSync store.""" for ip_address in self.device_data: @@ -292,5 +291,7 @@ def load(self): self.load_device_types() self.load_devices() else: - self.job.logger.error("Data returned from CommandGetter is not the correct type. 
" - " No devices will be onboarded, check the CommandGetter job logs.") + self.job.logger.error( + "Data returned from CommandGetter is not the correct type. " + " No devices will be onboarded, check the CommandGetter job logs." + ) diff --git a/nautobot_device_onboarding/diffsync/models/network_importer_models.py b/nautobot_device_onboarding/diffsync/models/network_importer_models.py index 58c1010e..272dc4ad 100644 --- a/nautobot_device_onboarding/diffsync/models/network_importer_models.py +++ b/nautobot_device_onboarding/diffsync/models/network_importer_models.py @@ -2,14 +2,18 @@ from typing import List, Optional -from django.core.exceptions import MultipleObjectsReturned, ObjectDoesNotExist, ValidationError +from diffsync import DiffSync, DiffSyncModel +from django.core.exceptions import ( + MultipleObjectsReturned, + ObjectDoesNotExist, + ValidationError, +) from nautobot.dcim.choices import InterfaceTypeChoices from nautobot.dcim.models import Device, Interface, Location from nautobot.extras.models import Status from nautobot.ipam.models import VLAN, IPAddress, IPAddressToInterface from nautobot_ssot.contrib import NautobotModel -from diffsync import DiffSync, DiffSyncModel from nautobot_device_onboarding.utils import diffsync_utils diff --git a/nautobot_device_onboarding/diffsync/models/onboarding_models.py b/nautobot_device_onboarding/diffsync/models/onboarding_models.py index be7b1ed1..b9cea786 100644 --- a/nautobot_device_onboarding/diffsync/models/onboarding_models.py +++ b/nautobot_device_onboarding/diffsync/models/onboarding_models.py @@ -2,13 +2,17 @@ from typing import Optional -from django.core.exceptions import MultipleObjectsReturned, ObjectDoesNotExist, ValidationError +from diffsync import DiffSyncModel +from django.core.exceptions import ( + MultipleObjectsReturned, + ObjectDoesNotExist, + ValidationError, +) from nautobot.apps.choices import InterfaceTypeChoices from nautobot.dcim.models import Device, DeviceType, Interface, Manufacturer, 
Platform from nautobot.extras.models import Role, SecretsGroup, Status from nautobot_ssot.contrib import NautobotModel -from diffsync import DiffSyncModel from nautobot_device_onboarding.utils import diffsync_utils diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py index b538c585..9e1b59bf 100755 --- a/nautobot_device_onboarding/jobs.py +++ b/nautobot_device_onboarding/jobs.py @@ -5,12 +5,37 @@ from diffsync.enum import DiffSyncFlags from django.conf import settings -from nautobot.apps.jobs import BooleanVar, IntegerVar, Job, MultiObjectVar, ObjectVar, StringVar +from nautobot.apps.jobs import ( + BooleanVar, + IntegerVar, + Job, + MultiObjectVar, + ObjectVar, + StringVar, +) from nautobot.core.celery import register_jobs from nautobot.dcim.models import Device, DeviceType, Location, Platform -from nautobot.extras.choices import SecretsGroupAccessTypeChoices, SecretsGroupSecretTypeChoices -from nautobot.extras.models import Role, SecretsGroup, SecretsGroupAssociation, Status, Tag +from nautobot.extras.choices import ( + SecretsGroupAccessTypeChoices, + SecretsGroupSecretTypeChoices, +) +from nautobot.extras.models import ( + Role, + SecretsGroup, + SecretsGroupAssociation, + Status, + Tag, +) from nautobot.ipam.models import Namespace +from nautobot_plugin_nornir.constants import NORNIR_SETTINGS +from nautobot_plugin_nornir.plugins.inventory.nautobot_orm import NautobotORMInventory +from nautobot_ssot.jobs.base import DataSource +from nornir import InitNornir +from nornir.core.plugins.inventory import ( + InventoryPluginRegister, + TransformFunctionRegister, +) + from nautobot_device_onboarding.diffsync.adapters.network_importer_adapters import ( NetworkImporterNautobotAdapter, NetworkImporterNetworkAdapter, @@ -26,13 +51,11 @@ from nautobot_device_onboarding.nornir_plays.empty_inventory import EmptyInventory from nautobot_device_onboarding.nornir_plays.logger import NornirLogger from 
nautobot_device_onboarding.nornir_plays.processor import ProcessorDO -from nautobot_device_onboarding.utils.helper import add_platform_parsing_info, get_job_filter +from nautobot_device_onboarding.utils.helper import ( + add_platform_parsing_info, + get_job_filter, +) from nautobot_device_onboarding.utils.inventory_creator import _set_inventory -from nautobot_plugin_nornir.constants import NORNIR_SETTINGS -from nautobot_plugin_nornir.plugins.inventory.nautobot_orm import NautobotORMInventory -from nautobot_ssot.jobs.base import DataSource -from nornir import InitNornir -from nornir.core.plugins.inventory import InventoryPluginRegister, TransformFunctionRegister InventoryPluginRegister.register("nautobot-inventory", NautobotORMInventory) InventoryPluginRegister.register("empty-inventory", EmptyInventory) diff --git a/nautobot_device_onboarding/nautobot_keeper.py b/nautobot_device_onboarding/nautobot_keeper.py index 0de8b094..b001e019 100644 --- a/nautobot_device_onboarding/nautobot_keeper.py +++ b/nautobot_device_onboarding/nautobot_keeper.py @@ -8,7 +8,14 @@ from django.core.exceptions import ValidationError from nautobot.apps.choices import PrefixTypeChoices from nautobot.dcim.choices import InterfaceTypeChoices -from nautobot.dcim.models import Device, DeviceType, Interface, Location, Manufacturer, Platform +from nautobot.dcim.models import ( + Device, + DeviceType, + Interface, + Location, + Manufacturer, + Platform, +) from nautobot.extras.models import Role, Status from nautobot.extras.models.customfields import CustomField from nautobot.ipam.models import IPAddress, Namespace, Prefix diff --git a/nautobot_device_onboarding/nornir_plays/processor.py b/nautobot_device_onboarding/nornir_plays/processor.py index 45575e32..8a972f98 100755 --- a/nautobot_device_onboarding/nornir_plays/processor.py +++ b/nautobot_device_onboarding/nornir_plays/processor.py @@ -2,12 +2,13 @@ from typing import Dict -from nautobot_device_onboarding.utils.formatter import 
extract_show_data from nornir.core.inventory import Host from nornir.core.task import MultiResult, Task from nornir_nautobot.exceptions import NornirNautobotException from nornir_nautobot.plugins.processors import BaseLoggingProcessor +from nautobot_device_onboarding.utils.formatter import extract_show_data + class ProcessorDO(BaseLoggingProcessor): """Processor class for Device Onboarding jobs.""" @@ -53,7 +54,7 @@ def subtask_instance_completed(self, task: Task, host: Host, result: MultiResult "failed": result.failed, } ) - formatted_data = extract_show_data(host, result, task.parent_task.params['command_getter_job']) + formatted_data = extract_show_data(host, result, task.parent_task.params["command_getter_job"]) # revist should be able to just update self.data with full formatted_data for k, v in formatted_data.items(): self.data[host.name][k] = v diff --git a/nautobot_device_onboarding/tests/test_nautobot_keeper.py b/nautobot_device_onboarding/tests/test_nautobot_keeper.py index 5cfe8cd3..9a84bb9c 100644 --- a/nautobot_device_onboarding/tests/test_nautobot_keeper.py +++ b/nautobot_device_onboarding/tests/test_nautobot_keeper.py @@ -4,7 +4,15 @@ from django.contrib.contenttypes.models import ContentType from django.test import TestCase from nautobot.dcim.choices import InterfaceTypeChoices -from nautobot.dcim.models import Device, DeviceType, Interface, Location, LocationType, Manufacturer, Platform +from nautobot.dcim.models import ( + Device, + DeviceType, + Interface, + Location, + LocationType, + Manufacturer, + Platform, +) from nautobot.extras.choices import CustomFieldTypeChoices from nautobot.extras.models import CustomField, Role, Status from nautobot.extras.models.secrets import SecretsGroup diff --git a/nautobot_device_onboarding/utils/formatter.py b/nautobot_device_onboarding/utils/formatter.py index abc207db..150843be 100755 --- a/nautobot_device_onboarding/utils/formatter.py +++ b/nautobot_device_onboarding/utils/formatter.py @@ -5,31 +5,23 @@ 
import yaml from django.template import engines from jdiff import extract_data_from_json -from jinja2 import FileSystemLoader -from jinja2.sandbox import SandboxedEnvironment +from jinja2 import Environment DATA_DIR = os.path.abspath(os.path.join(os.path.dirname(os.path.dirname(__file__)), "command_mappers")) -def load_yaml_datafile(filename, config=None): +def load_yaml_datafile(filename): """Get the contents of the given YAML data file. Args: filename (str): Filename within the 'data' directory. - config (dict): Data for Jinja2 templating. """ file_path = os.path.join(DATA_DIR, filename) if not os.path.isfile(file_path): raise RuntimeError(f"No data file found at {file_path}") - if not config: - config = {} - jinja_env = SandboxedEnvironment( - loader=FileSystemLoader(DATA_DIR), autoescape=True, trim_blocks=True, lstrip_blocks=False - ) - jinja_env.filters = engines["jinja"].env.filters - template = jinja_env.get_template(filename) - populated = template.render(config) - return yaml.safe_load(populated) + with open(file_path, "r", encoding="utf-8") as yaml_file: + data = yaml.safe_load(yaml_file) + return data def extract_show_data(host, multi_result, command_getter_type): @@ -38,7 +30,11 @@ def extract_show_data(host, multi_result, command_getter_type): Args: host (host): host from task multi_result (multiResult): multiresult object from nornir + command_getter_type (str): to know what dict to pull, device_onboarding or network_importer. 
""" + jinja_env = Environment(autoescape=True, trim_blocks=True, lstrip_blocks=False) + jinja_env.filters = engines["jinja"].env.filters + host_platform = host.platform if host_platform == "cisco_xe": host_platform = "cisco_ios" @@ -48,10 +44,14 @@ def extract_show_data(host, multi_result, command_getter_type): for default_dict_field, command_info in command_jpaths[command_getter_type].items(): if not default_dict_field == "use_textfsm": if command_info["command"] == multi_result[0].name: - extracted_value = extract_data_from_json(multi_result[0].result, command_info["jpath"]) - if isinstance(extracted_value, list) and len(extracted_value) == 1: - extracted_value = extracted_value[0] - if "/" in extracted_value and default_dict_field == "mask_length": - extracted_value = extracted_value.split("/")[1] - result_dict[default_dict_field] = extracted_value + j2_rendered_jpath_template = jinja_env.from_string(command_info["jpath"]) + j2_rendered_jpath = j2_rendered_jpath_template.render(host_info=host.name) + extracted_value = extract_data_from_json(multi_result[0].result, j2_rendered_jpath) + if command_info.get("post_processor"): + transform_template = jinja_env.from_string(command_info["post_processor"]) + extracted_processed = transform_template.render(obj=extracted_value) + else: + if isinstance(extracted_value, list) and len(extracted_value) == 1: + extracted_processed = extracted_value[0] + result_dict[default_dict_field] = extracted_processed return result_dict diff --git a/nautobot_device_onboarding/utils/helper.py b/nautobot_device_onboarding/utils/helper.py index df5a7a92..4156dc91 100644 --- a/nautobot_device_onboarding/utils/helper.py +++ b/nautobot_device_onboarding/utils/helper.py @@ -2,9 +2,10 @@ from nautobot.dcim.filters import DeviceFilterSet from nautobot.dcim.models import Device -from nautobot_device_onboarding.utils.formatter import load_yaml_datafile from nornir_nautobot.exceptions import NornirNautobotException +from 
nautobot_device_onboarding.utils.formatter import load_yaml_datafile + FIELDS_PK = { "location", "role", @@ -50,15 +51,15 @@ def get_job_filter(data=None): return devices_filtered.qs -def _get_platform_parsing_info(host_platform, data): +def _get_platform_parsing_info(host_platform): """Open and load yaml file.""" if host_platform == "cisco_xe": host_platform = "cisco_ios" - yaml_parsing_info = load_yaml_datafile(f"{host_platform}.yml", config=data) + yaml_parsing_info = load_yaml_datafile(f"{host_platform}.yml") return yaml_parsing_info def add_platform_parsing_info(host): """This nornir transform function adds platform parsing info.""" - parsing_info = _get_platform_parsing_info(host.platform, data={"host_info": host}) + parsing_info = _get_platform_parsing_info(host.platform) host.data.update({"platform_parsing_info": parsing_info}) diff --git a/nautobot_device_onboarding/utils/inventory_creator.py b/nautobot_device_onboarding/utils/inventory_creator.py index b7eff582..d4327dc2 100755 --- a/nautobot_device_onboarding/utils/inventory_creator.py +++ b/nautobot_device_onboarding/utils/inventory_creator.py @@ -1,12 +1,16 @@ """Inventory Creator and Helpers.""" from django.conf import settings -from nautobot.extras.choices import SecretsGroupAccessTypeChoices, SecretsGroupSecretTypeChoices -from nautobot_device_onboarding.exceptions import OnboardException -from nautobot_device_onboarding.utils.helper import _get_platform_parsing_info +from nautobot.extras.choices import ( + SecretsGroupAccessTypeChoices, + SecretsGroupSecretTypeChoices, +) from netmiko import SSHDetect from nornir.core.inventory import ConnectionOptions, Host +from nautobot_device_onboarding.exceptions import OnboardException +from nautobot_device_onboarding.utils.helper import _get_platform_parsing_info + def _parse_credentials(credentials): """Parse and return dictionary of credentials.""" @@ -68,7 +72,9 @@ def _set_inventory(host_ip, platform, port, secrets_group): platform = 
platform.network_driver else: platform = guess_netmiko_device_type(host_ip, username, password, port) - parsing_info = _get_platform_parsing_info(platform, data={"host_info": host_ip}) + parsing_info = _get_platform_parsing_info(platform) + print(parsing_info) + print(type(parsing_info)) host = Host( name=host_ip, From 821c001e455bc70fbb4ef00d505f12a5041564d9 Mon Sep 17 00:00:00 2001 From: Jeff Kala Date: Sat, 17 Feb 2024 00:00:01 -0600 Subject: [PATCH 076/225] add a more advanced jdiff jpath query to ios for ni interfaces --- .../command_mappers/cisco_ios.yml | 95 ++----------------- 1 file changed, 6 insertions(+), 89 deletions(-) diff --git a/nautobot_device_onboarding/command_mappers/cisco_ios.yml b/nautobot_device_onboarding/command_mappers/cisco_ios.yml index 5ec58cd8..72ac0bca 100755 --- a/nautobot_device_onboarding/command_mappers/cisco_ios.yml +++ b/nautobot_device_onboarding/command_mappers/cisco_ios.yml @@ -18,93 +18,10 @@ device_onboarding: jpath: "[?ip_address=='{{ host_info }}'].prefix_length" network_importer: use_textfsm: true - hostname: - jpath: "[*].hostname" + serial: command: "show version" - - # commands = ["show version", "show interfaces", "show vlan", "show interfaces switchport"] - # all_results = {} - - # for command in commands: - # command_result = nornir_obj.run(task=netmiko_send_command, command_string=command, use_textfsm=True) - # # all_results = format_ni_data_cisco_ios(command=command,command_result=command_result) - # for host_name, result in command_result.items(): - # if command_result.failed: - # failed_results = {host_name: {"Failed": True, "subtask_result": result.result}} - # return failed_results - # if host_name not in all_results: - # all_results[host_name] = {"interfaces": {}, "serial": ""} - - # if command == "show version": - # self.logger.info(f"Show version: {result.result}") - # serial_info = result.result[0] - # self.logger.info(f"Serial Info: {serial_info}") - # serial_number = serial_info.get("serial") - # 
all_results[host_name]["serial"] = serial_number[0] - # elif command == "show interfaces": - # self.logger.info(f"Interfaces: {result.result}") - # for interface_info in result.result: - # self.logger.info(f"Interface Info: {interface_info}") - # interface_name = interface_info.get("interface") - # # media_type = interface_info.get("media_type") - # hardware_type = interface_info.get("hardware_type") - # mtu = interface_info.get("mtu") - # description = interface_info.get("description") - # mac_address = interface_info.get("mac_address") - # link_status = interface_info.get("link_status") - # ip_address = interface_info.get("ip_address") - # mask_length = interface_info.get("prefix_length") - - # link_status = bool(link_status == "up") - - # interface_type = normalize_interface_type(hardware_type) - - # all_results[host_name]["interfaces"][interface_name] = { - # "mtu": mtu, - # "type": interface_type, - # "description": description, - # "mac_address": mac_address, - # "enabled": link_status, - # "ip_addresses": [{"host": ip_address, "mask_length": mask_length}], - # } - # elif command == "show vlan": - # vlan_id_name_map = {} - # self.logger.info(f"Vlan: {result.result}") - # for vlan_info in result.result: - # self.logger.info(f"Vlan info: {vlan_info}") - # vlan_id = vlan_info.get("vlan_id") - # vlan_name = vlan_info.get("vlan_name") - # vlan_id_name_map[vlan_id] = vlan_name - # self.logger.info(f"Vlan ID Name Map: {vlan_id_name_map}") - - # elif command == "show interfaces switchport": - # self.logger.info(f"Interfaces Switchport: {result.result}") - # for interface_info in result.result: - # self.logger.info(f"Interface Info: {interface_info}") - # interface_name = normalize_interface_name(interface_info.get("interface")) - # self.logger.info(f"Interface Name: {interface_name}") - # interface_mode = normalize_tagged_interface(interface_info.get("admin_mode")) - # access_vlan = interface_info.get("access_vlan") - # tagged_vlans = 
interface_info.get("trunking_vlans", []) - # tagged_vlans_list = tagged_vlans[0].split(",") - # self.logger.info(f"tagged_vlans: {tagged_vlans}") - - # if interface_name in all_results[host_name]["interfaces"]: - # all_results[host_name]["interfaces"][interface_name]["mode"] = interface_mode - # all_results[host_name]["interfaces"][interface_name]["access_vlan"] = { - # "vlan_id": access_vlan, - # "vlan_name": vlan_id_name_map.get(access_vlan, ""), - # } - - # # Prepare tagged VLANs info - # tagged_vlans_info = [ - # {"vlan_id": vlan_id, "vlan_name": vlan_id_name_map.get(vlan_id, "Unknown VLAN")} - # for vlan_id in tagged_vlans_list - # if vlan_id in vlan_id_name_map - # ] - # self.logger.info(f"tagged_vlans_info: {tagged_vlans_info}") - # all_results[host_name]["interfaces"][interface_name][ - # "tagged_vlans" - # ] = tagged_vlans_info - # else: - # self.logger.info(f"Interface {interface_name} not found in interfaces list.") + jpath: "[*].serial[0]" + interfaces: + command: "show interfaces" + jpath: "[*].[$interface$,link_status,protocol_status,hardware_type,mac_address,ip_address,prefix_length,mtu]" + post_processor: "{{ obj }}" From 68299d645b7d70212724b8b0b70a8e14df3ff410 Mon Sep 17 00:00:00 2001 From: Susan Hooks Date: Mon, 19 Feb 2024 19:06:17 +0000 Subject: [PATCH 077/225] updates for parsing --- nautobot_device_onboarding/command_mappers/cisco_ios.yml | 4 ++-- nautobot_device_onboarding/command_mappers/cisco_nxos.yml | 4 ++-- .../diffsync/adapters/onboarding_adapters.py | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/nautobot_device_onboarding/command_mappers/cisco_ios.yml b/nautobot_device_onboarding/command_mappers/cisco_ios.yml index 5d3b6749..f5fac7f5 100755 --- a/nautobot_device_onboarding/command_mappers/cisco_ios.yml +++ b/nautobot_device_onboarding/command_mappers/cisco_ios.yml @@ -9,8 +9,8 @@ device_onboarding: command: "show version" jpath: "[*].serial[0]" device_type: - command: "show inventory" - jpath: 
"[?name=='Chassis'].pid" + command: "show version" + jpath: "[*].hardware[0]" mgmt_interface: command: "show interfaces" jpath: "[?ip_address=='{{ host_info }}'].interface" diff --git a/nautobot_device_onboarding/command_mappers/cisco_nxos.yml b/nautobot_device_onboarding/command_mappers/cisco_nxos.yml index 2082e146..8074c1c9 100755 --- a/nautobot_device_onboarding/command_mappers/cisco_nxos.yml +++ b/nautobot_device_onboarding/command_mappers/cisco_nxos.yml @@ -12,7 +12,7 @@ device_onboarding: jpath: "[*].platform" mgmt_interface: command: "show interface" - jpath: "[?ip_address=='{{ host_info }}'].interface" + jpath: "[?ip_address=='{{ host_info }}'].interface || [`mgmt0`]" mask_length: command: "show interface" - jpath: "[?ip_address=='{{ host_info }}'].prefix_length" + jpath: "[?ip_address=='{{ host_info }}'].prefix_length || [`31`]" diff --git a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py index 5aa868b6..eac4b28d 100644 --- a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py @@ -281,7 +281,7 @@ def load_devices(self): def load(self): """Load network data.""" self._validate_ip_addresses(self.job.ip_addresses) - # self.execute_command_getter() + self.execute_command_getter() data_type_check = self._check_data_type(self.device_data) if data_type_check: self.load_manufacturers() From 976bdd49810ebf72b85f9a05934633623836c60d Mon Sep 17 00:00:00 2001 From: David Cates Date: Tue, 20 Feb 2024 08:06:33 -0700 Subject: [PATCH 078/225] bump version --- nautobot_device_onboarding/diffsync/mock_data.py | 4 ++-- pyproject.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/nautobot_device_onboarding/diffsync/mock_data.py b/nautobot_device_onboarding/diffsync/mock_data.py index 7741c502..1e9f7152 100644 --- a/nautobot_device_onboarding/diffsync/mock_data.py +++ 
b/nautobot_device_onboarding/diffsync/mock_data.py @@ -176,11 +176,11 @@ "hostname": "demo-cisco-xe1", "serial": "9ABUXU581111", "device_type": "CSR1000V17", - # "mgmt_interface": "GigabitEthernet20", + "mgmt_interface": "GigabitEthernet20", "manufacturer": "Cisco", "platform": "IOS-test", "network_driver": "cisco_ios", - # "mask_length": 16, + "mask_length": 16, }, "10.1.1.10": { "hostname": "demo-cisco-xe2", diff --git a/pyproject.toml b/pyproject.toml index 79fd1b17..7b1e155a 100755 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "nautobot-device-onboarding" -version = "3.0.2a2" +version = "3.0.2a3" description = "A app for Nautobot to easily onboard new devices." authors = ["Network to Code, LLC "] license = "Apache-2.0" From c8975a77770126c0b80adaa67ea58462eb36e3b2 Mon Sep 17 00:00:00 2001 From: David Cates Date: Tue, 20 Feb 2024 15:19:22 -0700 Subject: [PATCH 079/225] update network importer --- .../adapters/network_importer_adapters.py | 139 +++++++++++------- .../diffsync/adapters/onboarding_adapters.py | 40 +++-- .../diffsync/mock_data.py | 20 +-- .../models/network_importer_models.py | 38 ++--- .../diffsync/models/onboarding_models.py | 6 +- nautobot_device_onboarding/jobs.py | 36 +---- nautobot_device_onboarding/nautobot_keeper.py | 9 +- .../tests/test_nautobot_keeper.py | 10 +- .../utils/diffsync_utils.py | 22 +++ .../utils/inventory_creator.py | 5 +- 10 files changed, 170 insertions(+), 155 deletions(-) diff --git a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py index 817ef323..7b791099 100644 --- a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py @@ -4,6 +4,7 @@ import diffsync from diffsync.enum import DiffSyncModelFlags +from django.core.exceptions import ValidationError from nautobot.apps.choices import 
JobResultStatusChoices from nautobot.dcim.models import Interface from nautobot.extras.models import Job, JobResult @@ -12,6 +13,7 @@ from netaddr import EUI, mac_unix_expanded from nautobot_device_onboarding.diffsync.models import network_importer_models +from nautobot_device_onboarding.utils import diffsync_utils class FilteredNautobotAdapter(NautobotAdapter): @@ -55,8 +57,20 @@ def load_param_mac_address(self, parameter_name, database_object): return str(database_object.mac_address) def load_ip_addresses(self): - """Load IP addresses into the DiffSync store.""" - for ip_address in IPAddress.objects.filter(parent__namespace__name=self.job.namespace.name): + """Load IP addresses into the DiffSync store. + + Only IP Addresses that were returned by the CommandGetter job should be loaded. + """ + ip_address_hosts = set() + for _, device_data in self.job.command_getter_result.items(): + for _, interface_data in device_data["interfaces"].items(): + for ip_address in interface_data["ip_addresses"]: + ip_address_hosts.add(ip_address["host"]) + + for ip_address in IPAddress.objects.filter( + host__in=ip_address_hosts, + parent__namespace__name=self.job.namespace.name, + ): network_ip_address = self.ip_address( diffsync=self, host=ip_address.host, @@ -72,11 +86,15 @@ def load_ip_addresses(self): self.job.logger.debug(f"{network_ip_address} loaded.") except diffsync.exceptions.ObjectAlreadyExists: self.job.logger.warning( - f"{network_ip_address} is already loaded to the " "DiffSync store. This is a duplicate IP Address." + f"{network_ip_address} is already loaded to the DiffSync store. This is a duplicate IP Address." ) def load_vlans(self): - """Load vlans into the Diffsync store.""" + """ + Load Vlans into the Diffsync store. + + Only Vlans that were returned by the CommandGetter job should be loaded. 
+ """ for vlan in VLAN.objects.all(): network_vlan = self.vlan( diffsync=self, @@ -87,12 +105,17 @@ def load_vlans(self): try: network_vlan.model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST self.add(network_vlan) + if self.job.debug: + self.job.logger.debug(f"Vlan {network_vlan} loaded.") except diffsync.exceptions.ObjectAlreadyExists: pass def load_tagged_vlans_to_interface(self): - """Load a model representing tagged vlan assignments to the Diffsync store.""" - for interface in Interface.objects.filter(device__in=self.job.filtered_devices): + """Load a model representing tagged vlan assignments to the Diffsync store. + + Only Vlan assignments that were returned by the CommandGetter job should be loaded. + """ + for interface in Interface.objects.filter(device__in=self.job.devices_to_load): tagged_vlans = [] for vlan in interface.tagged_vlans.all(): vlan_dict = {} @@ -106,18 +129,28 @@ def load_tagged_vlans_to_interface(self): name=interface.name, tagged_vlans=tagged_vlans, ) + network_tagged_vlans_to_interface.model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST self.add(network_tagged_vlans_to_interface) + if self.job.debug: + self.job.logger.debug(f"Vlan to interface: {network_tagged_vlans_to_interface} loaded.") def load_lag_to_interface(self): - """Load a model representing lag assignments to the Diffsync store.""" - for interface in Interface.objects.filter(device__in=self.job.filtered_devices): + """ + Load a model representing lag assignments to the Diffsync store. + + Only Lag assignments that were returned by the CommandGetter job should be loaded. 
+ """ + for interface in Interface.objects.filter(device__in=self.job.devices_to_load): network_lag_to_interface = self.lag_to_interface( diffsync=self, device__name=interface.device.name, name=interface.name, - lag__interface__name=interface.lag.name if interface.lag else None, + lag__interface__name=interface.lag.name if interface.lag else "", ) + network_lag_to_interface.model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST self.add(network_lag_to_interface) + if self.job.debug: + self.job.logger.debug(f"Lag to interface {network_lag_to_interface} loaded.") def load(self): """Generic implementation of the load function.""" @@ -155,8 +188,6 @@ def __init__(self, *args, job, sync=None, **kwargs): self.job = job self.sync = sync - device_data = None - device = network_importer_models.NetworkImporterDevice interface = network_importer_models.NetworkImporterInterface ip_address = network_importer_models.NetworkImporterIPAddress @@ -174,24 +205,23 @@ def __init__(self, *args, job, sync=None, **kwargs): "lag_to_interface", ] - # def _handle_failed_connections(self, device_data): - # """ - # Handle result data from failed device connections. - - # If a device fails to return expected data, log the result - # and remove it from the data to be loaded into the diffsync store. - # """ - # failed_ip_addresses = [] - - # for ip_address in device_data: - # if device_data[ip_address].get("failed"): - # self.job.logger.error(f"Connection or data error for {ip_address}. This device will not be onboarded.") - # if self.job.debug: - # self.job.logger.error(device_data[ip_address].get("subtask_result")) - # failed_ip_addresses.append(ip_address) - # for ip_address in failed_ip_addresses: - # del device_data[ip_address] - # self.device_data = device_data + def _handle_failed_connections(self, device_data): + """ + Handle result data from failed device connections. 
+ + If a device fails to return expected data, log the result + and remove it from the data to be loaded into the diffsync store. + """ + failed_device_connections = [] + + for hostname in device_data: + if device_data[hostname].get("failed"): + self.job.logger.error(f"Connection or data error for {hostname}. " "This device will not be synced.") + failed_device_connections.append(hostname) + for hostname in failed_device_connections: + del device_data[hostname] + self.job.command_getter_result = device_data + self.job.devices_to_load = diffsync_utils.generate_device_querset_from_command_getter_result(device_data) def execute_command_getter(self): """Start the CommandGetterDO job to query devices for data.""" @@ -209,9 +239,18 @@ def execute_command_getter(self): break if self.job.debug: self.job.logger.debug(f"Command Getter Job Result: {result.result}") - # TODO: Handle failed connections - # self._handle_failed_connections(device_data=result.result) - self.device_data = result.result + # verify data returned is a dict + data_type_check = diffsync_utils.check_data_type(result.result) + if self.job.debug: + self.job.logger.debug(f"CommandGetter data type check resut: {data_type_check}") + if data_type_check: + self._handle_failed_connections(device_data=result.result) + else: + self.job.logger.error( + "Data returned from CommandGetter is not the correct type. " + "No devices will be onboarded, check the CommandGetter job logs." 
+ ) + raise ValidationError("Unexpected data returend from CommandGetter.") def _process_mac_address(self, mac_address): """Convert a mac address to match the value stored by Nautobot.""" @@ -219,7 +258,7 @@ def _process_mac_address(self, mac_address): def load_devices(self): """Load devices into the DiffSync store.""" - for hostname, device_data in self.device_data.items(): + for hostname, device_data in self.job.command_getter_result.items(): network_device = self.device(diffsync=self, name=hostname, serial=device_data["serial"]) self.add(network_device) if self.job.debug: @@ -253,7 +292,7 @@ def load_interface(self, hostname, interface_name, interface_data): def load_ip_addresses(self): """Load IP addresses into the DiffSync store.""" - for hostname, device_data in self.device_data.items(): + for hostname, device_data in self.job.command_getter_result.items(): for interface_name, interface_data in device_data["interfaces"].items(): for ip_address in interface_data["ip_addresses"]: if self.job.debug: @@ -267,7 +306,6 @@ def load_ip_addresses(self): status__name=self.job.ip_address_status.name, ) try: - network_ip_address.model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST self.add(network_ip_address) if self.job.debug: self.job.logger.debug(f"{network_ip_address} loaded.") @@ -280,14 +318,12 @@ def load_ip_addresses(self): def load_vlans(self): """Load vlans into the Diffsync store.""" location_names = {} - for device in self.job.filtered_devices: + for device in self.job.devices_to_load: location_names[device.name] = device.location.name - for hostname, device_data in self.device_data.items(): - for interface_name, interface_data in device_data["interfaces"].items(): + for hostname, device_data in self.job.command_getter_result.items(): + for _, interface_data in device_data["interfaces"].items(): # add tagged vlans - if self.job.debug: - self.job.logger.debug(f"Loading tagged vlans for {interface_name}") for tagged_vlan in interface_data["tagged_vlans"]: 
network_vlan = self.vlan( diffsync=self, @@ -296,10 +332,9 @@ def load_vlans(self): location__name=location_names.get(hostname, ""), ) try: - network_vlan.model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST self.add(network_vlan) if self.job.debug: - self.job.logger.debug(f"tagged vlan {network_vlan} loaded.") + self.job.logger.debug(f"Tagged Vlan {network_vlan} loaded.") except diffsync.exceptions.ObjectAlreadyExists: pass # check for untagged vlan and add if necessary @@ -311,16 +346,15 @@ def load_vlans(self): location__name=location_names.get(hostname, ""), ) try: - network_vlan.model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST self.add(network_vlan) if self.job.debug: - self.job.logger.debug(f"untagged vlan {network_vlan} loaded.") + self.job.logger.debug(f"Untagged Vlan {network_vlan} loaded.") except diffsync.exceptions.ObjectAlreadyExists: pass def load_ip_address_to_interfaces(self): """Load ip address interface assignments into the Diffsync store.""" - for hostname, device_data in self.device_data.items(): + for hostname, device_data in self.job.command_getter_result.items(): for interface_name, interface_data in device_data["interfaces"].items(): for ip_address in interface_data["ip_addresses"]: network_ip_address_to_interface = self.ipaddress_to_interface( @@ -332,11 +366,11 @@ def load_ip_address_to_interfaces(self): ) self.add(network_ip_address_to_interface) if self.job.debug: - self.job.logger.debug(f"{network_ip_address_to_interface} loaded.") + self.job.logger.debug(f"IP Address to interface {network_ip_address_to_interface} loaded.") def load_tagged_vlans_to_interface(self): """Load tagged vlan to interface assignments into the Diffsync store.""" - for hostname, device_data in self.device_data.items(): + for hostname, device_data in self.job.command_getter_result.items(): for interface_name, interface_data in device_data["interfaces"].items(): network_tagged_vlans_to_interface = self.tagged_vlans_to_interface( diffsync=self, @@ -345,24 +379,25 
@@ def load_tagged_vlans_to_interface(self): tagged_vlans=interface_data["tagged_vlans"], ) self.add(network_tagged_vlans_to_interface) + if self.job.debug: + self.job.logger.debug(f"Tagged Vlan to interface {network_tagged_vlans_to_interface} loaded.") def load_lag_to_interface(self): """Load lag interface assignments into the Diffsync store.""" - for hostname, device_data in self.device_data.items(): + for hostname, device_data in self.job.command_getter_result.items(): for interface_name, interface_data in device_data["interfaces"].items(): network_lag_to_interface = self.lag_to_interface( diffsync=self, device__name=hostname, name=interface_name, - lag__interface__name=interface_data["lag"] if interface_data["lag"] else None, + lag__interface__name=interface_data["lag"] if interface_data["lag"] else "", ) self.add(network_lag_to_interface) + if self.job.debug: + self.job.logger.debug(f"Lag to interface {network_lag_to_interface} loaded.") def load(self): """Load network data.""" - # TODO: Function for comparing incoming hostnames to nautobot hostnames loaded for sync. - # remove missing hostnames from nautobot side of the sync (self.job.filtered_devices). 
- self.execute_command_getter() self.load_ip_addresses() if self.job.sync_vlans: diff --git a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py index eac4b28d..ee13d848 100644 --- a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py @@ -4,11 +4,13 @@ import diffsync import netaddr +from django.core.exceptions import ValidationError from nautobot.apps.choices import JobResultStatusChoices from nautobot.dcim.models import Device, DeviceType, Manufacturer, Platform from nautobot.extras.models import Job, JobResult from nautobot_device_onboarding.diffsync.models import onboarding_models +from nautobot_device_onboarding.utils import diffsync_utils class OnboardingNautobotAdapter(diffsync.DiffSync): @@ -62,7 +64,7 @@ def load_device_types(self): model=device_type.model, part_number=device_type.model, manufacturer__name=device_type.manufacturer.name, - ) # type: ignore + ) self.add(onboarding_device_type) if self.job.debug: self.job.logger.debug(f"DeviceType: {device_type.model} loaded.") @@ -149,7 +151,7 @@ def _handle_failed_connections(self, device_data): for ip_address in device_data: if device_data[ip_address].get("failed"): - self.job.logger.error(f"Connection or data error for {ip_address}. This device will not be onboarded.") + self.job.logger.error(f"Connection or data error for {ip_address}. 
" "This device will not be synced.") failed_ip_addresses.append(ip_address) for ip_address in failed_ip_addresses: del device_data[ip_address] @@ -179,14 +181,17 @@ def execute_command_getter(self): break if self.job.debug: self.job.logger.debug(f"Command Getter Job Result: {result.result}") - self._handle_failed_connections(device_data=result.result) - - def _check_data_type(self, data): - """Verify the data returned from CommandGetter is not a string.""" - data_type_check_result = True - if isinstance(data, str): - data_type_check_result = False - return data_type_check_result + data_type_check = diffsync_utils.check_data_type(result.result) + if self.job.debug: + self.job.logger.debug(f"CommandGetter data type check resut: {data_type_check}") + if data_type_check: + self._handle_failed_connections(device_data=result.result) + else: + self.job.logger.error( + "Data returned from CommandGetter is not the correct type. " + "No devices will be onboarded, check the CommandGetter job logs." + ) + raise ValidationError("Unexpected data returend from CommandGetter.") def load_manufacturers(self): """Load manufacturers into the DiffSync store.""" @@ -282,14 +287,7 @@ def load(self): """Load network data.""" self._validate_ip_addresses(self.job.ip_addresses) self.execute_command_getter() - data_type_check = self._check_data_type(self.device_data) - if data_type_check: - self.load_manufacturers() - self.load_platforms() - self.load_device_types() - self.load_devices() - else: - self.job.logger.error( - "Data returned from CommandGetter is not the correct type. " - " No devices will be onboarded, check the CommandGetter job logs." 
- ) + self.load_manufacturers() + self.load_platforms() + self.load_device_types() + self.load_devices() diff --git a/nautobot_device_onboarding/diffsync/mock_data.py b/nautobot_device_onboarding/diffsync/mock_data.py index 1e9f7152..5b00c618 100644 --- a/nautobot_device_onboarding/diffsync/mock_data.py +++ b/nautobot_device_onboarding/diffsync/mock_data.py @@ -11,7 +11,7 @@ "status": "Active", "type": "100base-tx", "ip_addresses": [ - {"host": "10.1.1.8", "mask_length": 32}, + {"host": "10.1.1.8", "mask_length": 16}, ], "mac_address": "d8b1.905c.7130", "mtu": "1500", @@ -102,14 +102,14 @@ }, }, "demo-cisco-xe2": { - "serial": "9ABUXU581234", + "serial": "9ABUXU5882222", "interfaces": { "GigabitEthernet1": { "mgmt_only": True, "status": "Active", "type": "100base-tx", "ip_addresses": [ - {"host": "10.1.1.8", "mask_length": 32}, + {"host": "10.1.2.8", "mask_length": 24}, ], "mac_address": "d8b1.905c.5170", "mtu": "1500", @@ -125,14 +125,14 @@ "status": "Active", "type": "100base-tx", "ip_addresses": [ - {"host": "10.1.1.9", "mask_length": 24}, + {"host": "10.1.2.9", "mask_length": 24}, ], "mac_address": "d8b1.905c.5171", "mtu": "1500", "description": "uplink Po1", "enabled": True, "802.1Q_mode": "", - "lag": "Po2", + "lag": "Po1", "untagged_vlan": "", "tagged_vlans": [], }, @@ -141,8 +141,8 @@ "status": "Active", "type": "100base-tx", "ip_addresses": [ - {"host": "10.1.1.10", "mask_length": 24}, - {"host": "10.1.1.11", "mask_length": 22}, + {"host": "10.1.2.10", "mask_length": 24}, + {"host": "10.1.2.11", "mask_length": 22}, ], "mac_address": "d8b1.905c.5172", "mtu": "1500", @@ -153,7 +153,7 @@ "untagged_vlan": "", "tagged_vlans": [{"name": "vlan40", "id": "40"}, {"name": "vlan50", "id": "50"}], }, - "Po2": { + "Po1": { "mgmt_only": False, "status": "Active", "type": "lag", @@ -176,7 +176,7 @@ "hostname": "demo-cisco-xe1", "serial": "9ABUXU581111", "device_type": "CSR1000V17", - "mgmt_interface": "GigabitEthernet20", + "mgmt_interface": "GigabitEthernet1", 
"manufacturer": "Cisco", "platform": "IOS-test", "network_driver": "cisco_ios", @@ -186,7 +186,7 @@ "hostname": "demo-cisco-xe2", "serial": "9ABUXU5882222", "device_type": "CSR1000V2", - "mgmt_interface": "GigabitEthernet5", + "mgmt_interface": "GigabitEthernet1", "manufacturer": "Cisco", "platform": "IOS", "network_driver": "cisco_ios", diff --git a/nautobot_device_onboarding/diffsync/models/network_importer_models.py b/nautobot_device_onboarding/diffsync/models/network_importer_models.py index 272dc4ad..b188e144 100644 --- a/nautobot_device_onboarding/diffsync/models/network_importer_models.py +++ b/nautobot_device_onboarding/diffsync/models/network_importer_models.py @@ -3,11 +3,7 @@ from typing import List, Optional from diffsync import DiffSync, DiffSyncModel -from django.core.exceptions import ( - MultipleObjectsReturned, - ObjectDoesNotExist, - ValidationError, -) +from django.core.exceptions import MultipleObjectsReturned, ObjectDoesNotExist, ValidationError from nautobot.dcim.choices import InterfaceTypeChoices from nautobot.dcim.models import Device, Interface, Location from nautobot.extras.models import Status @@ -60,9 +56,13 @@ class NetworkImporterDevice(FilteredNautobotModel): @classmethod def _get_queryset(cls, diffsync: "DiffSync"): - """Get the queryset used to load the models data from Nautobot.""" + """Get the queryset used to load the models data from Nautobot. + + job.command_getter_result contains the result from the CommandGetter job. + Only devices that actually responded with data should be considered for the sync. 
+ """ if diffsync.job.filtered_devices: - return diffsync.job.filtered_devices + return diffsync.job.devices_to_load else: diffsync.job.logger.error("No device filter options were provided, no devices will be synced.") return cls._model.objects.none() @@ -83,9 +83,9 @@ def create(cls, diffsync, ids, attrs): return None def delete(self): - """Delete the ORM object corresponding to this diffsync object.""" + """Prevent device deletion.""" self.diffsync.job.logger.error(f"{self} will not be deleted.") - return super().delete() + return None class NetworkImporterInterface(FilteredNautobotModel): @@ -151,18 +151,22 @@ def create(cls, diffsync, ids, attrs): def update(self, attrs): """Update an existing IPAddressToInterface object.""" - ip_address = IPAddress.objects.get(**self.get_identifiers()) - + try: + ip_address = IPAddress.objects.get(host=self.host, parent__namespace=self.diffsync.job.namespace) + except ObjectDoesNotExist as err: + self.job.logger.error(f"{self} failed to update, {err}") if self.diffsync.job.debug: self.diffsync.job.logger.debug(f"Updating {self} with attrs: {attrs}") if attrs.get("mask_length"): ip_address.mask_length = attrs["mask_length"] if attrs.get("status__name"): ip_address.status = Status.objects.get(name=attrs["status__name"]) - try: - ip_address.validated_save() - except ValidationError as err: - self.job.logger.error(f"{self} failed to update, {err}") + if attrs.get("ip_version"): + ip_address.status = attrs["ip_version"] + try: + ip_address.validated_save() + except ValidationError as err: + self.job.logger.error(f"{self} failed to update, {err}") return super().update(attrs) @@ -173,17 +177,15 @@ class NetworkImporterIPAddressToInterface(FilteredNautobotModel): _model = IPAddressToInterface _modelname = "ipaddress_to_interface" _identifiers = ("interface__device__name", "interface__name", "ip_address__host") - _attributes = ("ip_address__mask_length",) interface__device__name: str interface__name: str ip_address__host: str - 
ip_address__mask_length: str @classmethod def _get_queryset(cls, diffsync: "DiffSync"): """Get the queryset used to load the models data from Nautobot.""" - return IPAddressToInterface.objects.filter(interface__device__in=diffsync.job.filtered_devices) + return IPAddressToInterface.objects.filter(interface__device__in=diffsync.job.devices_to_load) class NetworkImporterVLAN(DiffSyncModel): diff --git a/nautobot_device_onboarding/diffsync/models/onboarding_models.py b/nautobot_device_onboarding/diffsync/models/onboarding_models.py index b9cea786..2b8cdbb6 100644 --- a/nautobot_device_onboarding/diffsync/models/onboarding_models.py +++ b/nautobot_device_onboarding/diffsync/models/onboarding_models.py @@ -3,11 +3,7 @@ from typing import Optional from diffsync import DiffSyncModel -from django.core.exceptions import ( - MultipleObjectsReturned, - ObjectDoesNotExist, - ValidationError, -) +from django.core.exceptions import MultipleObjectsReturned, ObjectDoesNotExist, ValidationError from nautobot.apps.choices import InterfaceTypeChoices from nautobot.dcim.models import Device, DeviceType, Interface, Manufacturer, Platform from nautobot.extras.models import Role, SecretsGroup, Status diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py index 9e1b59bf..123f6b2d 100755 --- a/nautobot_device_onboarding/jobs.py +++ b/nautobot_device_onboarding/jobs.py @@ -5,36 +5,17 @@ from diffsync.enum import DiffSyncFlags from django.conf import settings -from nautobot.apps.jobs import ( - BooleanVar, - IntegerVar, - Job, - MultiObjectVar, - ObjectVar, - StringVar, -) +from nautobot.apps.jobs import BooleanVar, IntegerVar, Job, MultiObjectVar, ObjectVar, StringVar from nautobot.core.celery import register_jobs from nautobot.dcim.models import Device, DeviceType, Location, Platform -from nautobot.extras.choices import ( - SecretsGroupAccessTypeChoices, - SecretsGroupSecretTypeChoices, -) -from nautobot.extras.models import ( - Role, - SecretsGroup, - 
SecretsGroupAssociation, - Status, - Tag, -) +from nautobot.extras.choices import SecretsGroupAccessTypeChoices, SecretsGroupSecretTypeChoices +from nautobot.extras.models import Role, SecretsGroup, SecretsGroupAssociation, Status, Tag from nautobot.ipam.models import Namespace from nautobot_plugin_nornir.constants import NORNIR_SETTINGS from nautobot_plugin_nornir.plugins.inventory.nautobot_orm import NautobotORMInventory from nautobot_ssot.jobs.base import DataSource from nornir import InitNornir -from nornir.core.plugins.inventory import ( - InventoryPluginRegister, - TransformFunctionRegister, -) +from nornir.core.plugins.inventory import InventoryPluginRegister, TransformFunctionRegister from nautobot_device_onboarding.diffsync.adapters.network_importer_adapters import ( NetworkImporterNautobotAdapter, @@ -51,10 +32,7 @@ from nautobot_device_onboarding.nornir_plays.empty_inventory import EmptyInventory from nautobot_device_onboarding.nornir_plays.logger import NornirLogger from nautobot_device_onboarding.nornir_plays.processor import ProcessorDO -from nautobot_device_onboarding.utils.helper import ( - add_platform_parsing_info, - get_job_filter, -) +from nautobot_device_onboarding.utils.helper import add_platform_parsing_info, get_job_filter from nautobot_device_onboarding.utils.inventory_creator import _set_inventory InventoryPluginRegister.register("nautobot-inventory", NautobotORMInventory) @@ -382,7 +360,9 @@ class SSOTNetworkImporter(DataSource): # pylint: disable=too-many-instance-attr def __init__(self): """Initialize SSOTNetworkImporter.""" super().__init__() - self.filtered_devices = None + self.filtered_devices = None # Queryset of devices based on form inputs + self.command_getter_result = None # Dict result from CommandGetter job + self.devices_to_load = None # Queryset consisting of devices that responded class Meta: """Metadata about this Job.""" diff --git a/nautobot_device_onboarding/nautobot_keeper.py 
b/nautobot_device_onboarding/nautobot_keeper.py index b001e019..0de8b094 100644 --- a/nautobot_device_onboarding/nautobot_keeper.py +++ b/nautobot_device_onboarding/nautobot_keeper.py @@ -8,14 +8,7 @@ from django.core.exceptions import ValidationError from nautobot.apps.choices import PrefixTypeChoices from nautobot.dcim.choices import InterfaceTypeChoices -from nautobot.dcim.models import ( - Device, - DeviceType, - Interface, - Location, - Manufacturer, - Platform, -) +from nautobot.dcim.models import Device, DeviceType, Interface, Location, Manufacturer, Platform from nautobot.extras.models import Role, Status from nautobot.extras.models.customfields import CustomField from nautobot.ipam.models import IPAddress, Namespace, Prefix diff --git a/nautobot_device_onboarding/tests/test_nautobot_keeper.py b/nautobot_device_onboarding/tests/test_nautobot_keeper.py index 9a84bb9c..5cfe8cd3 100644 --- a/nautobot_device_onboarding/tests/test_nautobot_keeper.py +++ b/nautobot_device_onboarding/tests/test_nautobot_keeper.py @@ -4,15 +4,7 @@ from django.contrib.contenttypes.models import ContentType from django.test import TestCase from nautobot.dcim.choices import InterfaceTypeChoices -from nautobot.dcim.models import ( - Device, - DeviceType, - Interface, - Location, - LocationType, - Manufacturer, - Platform, -) +from nautobot.dcim.models import Device, DeviceType, Interface, Location, LocationType, Manufacturer, Platform from nautobot.extras.choices import CustomFieldTypeChoices from nautobot.extras.models import CustomField, Role, Status from nautobot.extras.models.secrets import SecretsGroup diff --git a/nautobot_device_onboarding/utils/diffsync_utils.py b/nautobot_device_onboarding/utils/diffsync_utils.py index a9193216..4c927efe 100644 --- a/nautobot_device_onboarding/utils/diffsync_utils.py +++ b/nautobot_device_onboarding/utils/diffsync_utils.py @@ -4,9 +4,31 @@ from django.core.exceptions import ObjectDoesNotExist, ValidationError from nautobot.apps.choices import 
PrefixTypeChoices +from nautobot.dcim.models import Device from nautobot.ipam.models import IPAddress, Prefix +def generate_device_querset_from_command_getter_result(command_getter_result): + """Generate a Nautobot device queryset based on data returned from CommandGetter.""" + devices_to_sync_hostnames = [] + devices_to_sync_serial_numbers = [] + for hostname, device_data in command_getter_result.items(): + devices_to_sync_hostnames.append(hostname) + devices_to_sync_serial_numbers.append(device_data["serial"]) + device_queryset = Device.objects.filter(name__in=devices_to_sync_hostnames).filter( + serial__in=devices_to_sync_serial_numbers + ) + return device_queryset + + +def check_data_type(data): + """Verify data is of type dict.""" + data_type_check_result = True + if not isinstance(data, dict): + data_type_check_result = False + return data_type_check_result + + def get_or_create_prefix(host, mask_length, default_status, namespace, job=None): """Attempt to get a Nautobot Prefix, create a new one if necessary.""" prefix = None diff --git a/nautobot_device_onboarding/utils/inventory_creator.py b/nautobot_device_onboarding/utils/inventory_creator.py index d4327dc2..37051f94 100755 --- a/nautobot_device_onboarding/utils/inventory_creator.py +++ b/nautobot_device_onboarding/utils/inventory_creator.py @@ -1,10 +1,7 @@ """Inventory Creator and Helpers.""" from django.conf import settings -from nautobot.extras.choices import ( - SecretsGroupAccessTypeChoices, - SecretsGroupSecretTypeChoices, -) +from nautobot.extras.choices import SecretsGroupAccessTypeChoices, SecretsGroupSecretTypeChoices from netmiko import SSHDetect from nornir.core.inventory import ConnectionOptions, Host From 4e021feef7fa42cf6ef2fbfe7fc5dd2c5a3d610b Mon Sep 17 00:00:00 2001 From: Jeff Kala Date: Tue, 20 Feb 2024 22:22:10 -0600 Subject: [PATCH 080/225] add platform none check --- nautobot_device_onboarding/nornir_plays/command_getter.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) 
diff --git a/nautobot_device_onboarding/nornir_plays/command_getter.py b/nautobot_device_onboarding/nornir_plays/command_getter.py index caea61a8..f960f17c 100755 --- a/nautobot_device_onboarding/nornir_plays/command_getter.py +++ b/nautobot_device_onboarding/nornir_plays/command_getter.py @@ -1,6 +1,6 @@ """Command Getter.""" -from nornir.core.task import Task +from nornir.core.task import Result, Task from nornir_netmiko.tasks import netmiko_send_command @@ -16,6 +16,12 @@ def _get_commands_to_run(yaml_parsed_info, command_getter_job): def netmiko_send_commands(task: Task, command_getter_job: str): """Run commands specified in PLATFORM_COMMAND_MAP.""" + if not task.host.platform: + return Result( + host=task.host, + result=f"{task.host.name} has no platform set.", + failed=True + ) commands = _get_commands_to_run(task.host.data["platform_parsing_info"], command_getter_job) for command in commands: command_use_textfsm = task.host.data["platform_parsing_info"][command_getter_job]["use_textfsm"] From 28123c115725f2d06229c0b7b385dcaf8972874a Mon Sep 17 00:00:00 2001 From: Jeff Kala Date: Tue, 20 Feb 2024 22:23:42 -0600 Subject: [PATCH 081/225] fix formatting --- .../nornir_plays/command_getter.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/nautobot_device_onboarding/nornir_plays/command_getter.py b/nautobot_device_onboarding/nornir_plays/command_getter.py index f960f17c..cbd11bbc 100755 --- a/nautobot_device_onboarding/nornir_plays/command_getter.py +++ b/nautobot_device_onboarding/nornir_plays/command_getter.py @@ -17,11 +17,11 @@ def _get_commands_to_run(yaml_parsed_info, command_getter_job): def netmiko_send_commands(task: Task, command_getter_job: str): """Run commands specified in PLATFORM_COMMAND_MAP.""" if not task.host.platform: - return Result( - host=task.host, - result=f"{task.host.name} has no platform set.", - failed=True - ) + return Result( + host=task.host, + result=f"{task.host.name} has no platform set.", + 
failed=True + ) commands = _get_commands_to_run(task.host.data["platform_parsing_info"], command_getter_job) for command in commands: command_use_textfsm = task.host.data["platform_parsing_info"][command_getter_job]["use_textfsm"] From 1d7b3d99ed05854999f5fded59de3945fa8b085e Mon Sep 17 00:00:00 2001 From: Jeff Kala Date: Tue, 20 Feb 2024 22:33:31 -0600 Subject: [PATCH 082/225] add conditional for no platform --- nautobot_device_onboarding/utils/inventory_creator.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/nautobot_device_onboarding/utils/inventory_creator.py b/nautobot_device_onboarding/utils/inventory_creator.py index 37051f94..b1679f71 100755 --- a/nautobot_device_onboarding/utils/inventory_creator.py +++ b/nautobot_device_onboarding/utils/inventory_creator.py @@ -69,7 +69,10 @@ def _set_inventory(host_ip, platform, port, secrets_group): platform = platform.network_driver else: platform = guess_netmiko_device_type(host_ip, username, password, port) - parsing_info = _get_platform_parsing_info(platform) + if platform + parsing_info = _get_platform_parsing_info(platform) + else: + parsing_info = {} print(parsing_info) print(type(parsing_info)) From fc0e7e8de6916fb256dc005c90f264baaebfee56 Mon Sep 17 00:00:00 2001 From: Jeff Kala Date: Tue, 20 Feb 2024 22:34:46 -0600 Subject: [PATCH 083/225] add conditional for no platform --- nautobot_device_onboarding/utils/inventory_creator.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/nautobot_device_onboarding/utils/inventory_creator.py b/nautobot_device_onboarding/utils/inventory_creator.py index b1679f71..740a959f 100755 --- a/nautobot_device_onboarding/utils/inventory_creator.py +++ b/nautobot_device_onboarding/utils/inventory_creator.py @@ -2,11 +2,10 @@ from django.conf import settings from nautobot.extras.choices import SecretsGroupAccessTypeChoices, SecretsGroupSecretTypeChoices -from netmiko import SSHDetect -from nornir.core.inventory import ConnectionOptions, 
Host - from nautobot_device_onboarding.exceptions import OnboardException from nautobot_device_onboarding.utils.helper import _get_platform_parsing_info +from netmiko import SSHDetect +from nornir.core.inventory import ConnectionOptions, Host def _parse_credentials(credentials): @@ -69,7 +68,7 @@ def _set_inventory(host_ip, platform, port, secrets_group): platform = platform.network_driver else: platform = guess_netmiko_device_type(host_ip, username, password, port) - if platform + if platform: parsing_info = _get_platform_parsing_info(platform) else: parsing_info = {} From ebe946d8d6ab8c1b8a997bf3065f87bc3ee336ae Mon Sep 17 00:00:00 2001 From: Jeff Kala Date: Tue, 20 Feb 2024 23:02:53 -0600 Subject: [PATCH 084/225] add conditional for no platform --- .../nornir_plays/command_getter.py | 1 + .../nornir_plays/processor.py | 13 +++++++++++-- 2 files changed, 12 insertions(+), 2 deletions(-) diff --git a/nautobot_device_onboarding/nornir_plays/command_getter.py b/nautobot_device_onboarding/nornir_plays/command_getter.py index cbd11bbc..ff8f9c23 100755 --- a/nautobot_device_onboarding/nornir_plays/command_getter.py +++ b/nautobot_device_onboarding/nornir_plays/command_getter.py @@ -17,6 +17,7 @@ def _get_commands_to_run(yaml_parsed_info, command_getter_job): def netmiko_send_commands(task: Task, command_getter_job: str): """Run commands specified in PLATFORM_COMMAND_MAP.""" if not task.host.platform: + print("before returnf") return Result( host=task.host, result=f"{task.host.name} has no platform set.", diff --git a/nautobot_device_onboarding/nornir_plays/processor.py b/nautobot_device_onboarding/nornir_plays/processor.py index 8a972f98..c86f8610 100755 --- a/nautobot_device_onboarding/nornir_plays/processor.py +++ b/nautobot_device_onboarding/nornir_plays/processor.py @@ -2,13 +2,12 @@ from typing import Dict +from nautobot_device_onboarding.utils.formatter import extract_show_data from nornir.core.inventory import Host from nornir.core.task import MultiResult, Task 
from nornir_nautobot.exceptions import NornirNautobotException from nornir_nautobot.plugins.processors import BaseLoggingProcessor -from nautobot_device_onboarding.utils.formatter import extract_show_data - class ProcessorDO(BaseLoggingProcessor): """Processor class for Device Onboarding jobs.""" @@ -18,6 +17,10 @@ def __init__(self, logger, command_outputs): self.logger = logger self.data: Dict = command_outputs + def task_instance_started(self, task: Task, host: Host) -> None: + if not self.data.get(host.name): + self.data[host.name] = {} + def task_instance_completed(self, task: Task, host: Host, result: MultiResult) -> None: """Nornir processor task completion for OS upgrades. @@ -43,6 +46,12 @@ def task_instance_completed(self, task: Task, host: Host, result: MultiResult) - f"task_instance_completed Task Name: {task.name} Task Result: {result.result}", extra={"object": task.host}, ) + if result.name == "netmiko_send_commands": + self.data[host.name].update( + { + "failed": result.failed, + } + ) def subtask_instance_completed(self, task: Task, host: Host, result: MultiResult) -> None: """Processor for Logging on SubTask Completed.""" From f91afecb796824c24fbcd76d6b5067328ea2196e Mon Sep 17 00:00:00 2001 From: Jeff Kala Date: Tue, 20 Feb 2024 23:15:06 -0600 Subject: [PATCH 085/225] add unsupported check --- nautobot_device_onboarding/nornir_plays/command_getter.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/nautobot_device_onboarding/nornir_plays/command_getter.py b/nautobot_device_onboarding/nornir_plays/command_getter.py index ff8f9c23..3845004f 100755 --- a/nautobot_device_onboarding/nornir_plays/command_getter.py +++ b/nautobot_device_onboarding/nornir_plays/command_getter.py @@ -1,5 +1,6 @@ """Command Getter.""" +from nautobot_device_onboarding.constants import NETMIKO_TO_NAPALM_STATIC from nornir.core.task import Result, Task from nornir_netmiko.tasks import netmiko_send_command @@ -17,12 +18,17 @@ def 
_get_commands_to_run(yaml_parsed_info, command_getter_job): def netmiko_send_commands(task: Task, command_getter_job: str): """Run commands specified in PLATFORM_COMMAND_MAP.""" if not task.host.platform: - print("before returnf") return Result( host=task.host, result=f"{task.host.name} has no platform set.", failed=True ) + if task.host.platform not in list(NETMIKO_TO_NAPALM_STATIC.keys()): + return Result( + host=task.host, + result=f"{task.host.name} has a unsupported platform set.", + failed=True + ) commands = _get_commands_to_run(task.host.data["platform_parsing_info"], command_getter_job) for command in commands: command_use_textfsm = task.host.data["platform_parsing_info"][command_getter_job]["use_textfsm"] From 1da8ab69419a28d3cde33823e867d179406ba2ce Mon Sep 17 00:00:00 2001 From: Jeff Kala Date: Tue, 20 Feb 2024 23:23:42 -0600 Subject: [PATCH 086/225] add readtimeout --- nautobot_device_onboarding/nornir_plays/command_getter.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nautobot_device_onboarding/nornir_plays/command_getter.py b/nautobot_device_onboarding/nornir_plays/command_getter.py index 3845004f..c9868ec7 100755 --- a/nautobot_device_onboarding/nornir_plays/command_getter.py +++ b/nautobot_device_onboarding/nornir_plays/command_getter.py @@ -32,4 +32,4 @@ def netmiko_send_commands(task: Task, command_getter_job: str): commands = _get_commands_to_run(task.host.data["platform_parsing_info"], command_getter_job) for command in commands: command_use_textfsm = task.host.data["platform_parsing_info"][command_getter_job]["use_textfsm"] - task.run(task=netmiko_send_command, name=command, command_string=command, use_textfsm=command_use_textfsm) + task.run(task=netmiko_send_command, name=command, command_string=command, use_textfsm=command_use_textfsm, read_timeout=60) From 59cd59be02c28fb9527fc91b0571f02717bc2c2c Mon Sep 17 00:00:00 2001 From: Jeff Kala Date: Wed, 21 Feb 2024 08:46:05 -0600 Subject: [PATCH 087/225] add failed task UI 
logging --- nautobot_device_onboarding/nornir_plays/processor.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/nautobot_device_onboarding/nornir_plays/processor.py b/nautobot_device_onboarding/nornir_plays/processor.py index c86f8610..b215b3df 100755 --- a/nautobot_device_onboarding/nornir_plays/processor.py +++ b/nautobot_device_onboarding/nornir_plays/processor.py @@ -52,6 +52,9 @@ def task_instance_completed(self, task: Task, host: Host, result: MultiResult) - "failed": result.failed, } ) + if result.failed: + self.logger.warning(f"Task Failed! Result {result.result}.", extra={"object": task.host}) + def subtask_instance_completed(self, task: Task, host: Host, result: MultiResult) -> None: """Processor for Logging on SubTask Completed.""" From 7c84d46a0a03af3d459d23bea419cea5e9ba19ca Mon Sep 17 00:00:00 2001 From: Jeff Kala Date: Wed, 21 Feb 2024 12:03:20 -0600 Subject: [PATCH 088/225] add junos support to do --- .../command_mappers/cisco_ios.yml | 1 - .../command_mappers/juniper_junos.yml | 13 +++++++------ 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/nautobot_device_onboarding/command_mappers/cisco_ios.yml b/nautobot_device_onboarding/command_mappers/cisco_ios.yml index f5fac7f5..af4b588f 100755 --- a/nautobot_device_onboarding/command_mappers/cisco_ios.yml +++ b/nautobot_device_onboarding/command_mappers/cisco_ios.yml @@ -25,4 +25,3 @@ network_importer: interfaces: command: "show interfaces" jpath: "[*].[$interface$,link_status,protocol_status,hardware_type,mac_address,ip_address,prefix_length,mtu]" - post_processor: "{{ obj }}" diff --git a/nautobot_device_onboarding/command_mappers/juniper_junos.yml b/nautobot_device_onboarding/command_mappers/juniper_junos.yml index f44de5f1..9f97b7c4 100755 --- a/nautobot_device_onboarding/command_mappers/juniper_junos.yml +++ b/nautobot_device_onboarding/command_mappers/juniper_junos.yml @@ -2,17 +2,18 @@ device_onboarding: use_textfsm: true hostname: - jpath: "[*].hostname" command: "show 
version" + jpath: "[*].hostname" serial: command: "show version" - jpath: "[*].serial[0]" + jpath: "[*].serial_number" device_type: - command: "show inventory" - jpath: "[?name=='Chassis'].pid" + command: "show version" + jpath: "[*].model" mgmt_interface: command: "show interfaces" - jpath: "[?ip_address=='{{ host_info }}'].interface" + jpath: "[?local=='{{ host_info }}'].interface" mask_length: command: "show interfaces" - jpath: "[?ip_address=='{{ host_info }}'].prefix_length" + jpath: "[?local=='{{ host_info }}'].destination" + post_processor: "{{ obj[0] | ipaddress_interface('netmask') | netmask_to_cidr }}" From 831219284e97f57a902d84d7436283716798603c Mon Sep 17 00:00:00 2001 From: David Cates Date: Wed, 21 Feb 2024 11:13:38 -0700 Subject: [PATCH 089/225] add logging for failed reason --- .../adapters/network_importer_adapters.py | 9 ++++++-- .../diffsync/adapters/onboarding_adapters.py | 19 +++++++++++---- .../nornir_plays/command_getter.py | 23 +++++++++---------- .../nornir_plays/processor.py | 5 ++-- nautobot_device_onboarding/utils/formatter.py | 9 ++++---- .../utils/inventory_creator.py | 5 ++-- 6 files changed, 43 insertions(+), 27 deletions(-) diff --git a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py index 7b791099..d3a11f4e 100644 --- a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py @@ -216,11 +216,16 @@ def _handle_failed_connections(self, device_data): for hostname in device_data: if device_data[hostname].get("failed"): - self.job.logger.error(f"Connection or data error for {hostname}. " "This device will not be synced.") + self.job.logger.error( + f"{hostname}: Connection or data error, this device will not be synced. 
" + f"{device_data[hostname].get('failed_reason')}" + ) failed_device_connections.append(hostname) for hostname in failed_device_connections: del device_data[hostname] - self.job.command_getter_result = device_data + if failed_device_connections: + self.job.logger.warning(f"Failed devices: {failed_device_connections}") + self.device_data = device_data self.job.devices_to_load = diffsync_utils.generate_device_querset_from_command_getter_result(device_data) def execute_command_getter(self): diff --git a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py index ee13d848..c8add562 100644 --- a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py @@ -151,10 +151,15 @@ def _handle_failed_connections(self, device_data): for ip_address in device_data: if device_data[ip_address].get("failed"): - self.job.logger.error(f"Connection or data error for {ip_address}. " "This device will not be synced.") + self.job.logger.error( + f"{ip_address}: Connection or data error, this device will not be synced. 
" + f"{device_data[ip_address].get('failed_reason')}" + ) failed_ip_addresses.append(ip_address) for ip_address in failed_ip_addresses: del device_data[ip_address] + if failed_ip_addresses: + self.job.logger.warning(f"Failed IP Addresses: {failed_ip_addresses}") self.device_data = device_data def execute_command_getter(self): @@ -208,7 +213,9 @@ def load_manufacturers(self): except diffsync.ObjectAlreadyExists: pass except KeyError as err: - self.job.logger.error(f"{ip_address}: Manufacturer due to missing key in returned data, {err}") + self.job.logger.error( + f"{ip_address}: Unable to load Manufacturer due to missing key in returned data, {err}" + ) def load_platforms(self): """Load platforms into the DiffSync store.""" @@ -227,7 +234,9 @@ def load_platforms(self): except diffsync.ObjectAlreadyExists: pass except KeyError as err: - self.job.logger.error(f"{ip_address}: Platform due to missing key in returned data, {err}") + self.job.logger.error( + f"{ip_address}: Unable to load Platform due to missing key in returned data, {err}" + ) def load_device_types(self): """Load device types into the DiffSync store.""" @@ -246,7 +255,9 @@ def load_device_types(self): except diffsync.ObjectAlreadyExists: pass except KeyError as err: - self.job.logger.error(f"{ip_address}: DeviceType due to missing key in returned data, {err}") + self.job.logger.error( + f"{ip_address}: Unable to load DeviceType due to missing key in returned data, {err}" + ) def load_devices(self): """Load devices into the DiffSync store.""" diff --git a/nautobot_device_onboarding/nornir_plays/command_getter.py b/nautobot_device_onboarding/nornir_plays/command_getter.py index c9868ec7..c2d6c2cb 100755 --- a/nautobot_device_onboarding/nornir_plays/command_getter.py +++ b/nautobot_device_onboarding/nornir_plays/command_getter.py @@ -1,9 +1,10 @@ """Command Getter.""" -from nautobot_device_onboarding.constants import NETMIKO_TO_NAPALM_STATIC from nornir.core.task import Result, Task from 
nornir_netmiko.tasks import netmiko_send_command +from nautobot_device_onboarding.constants import NETMIKO_TO_NAPALM_STATIC + def _get_commands_to_run(yaml_parsed_info, command_getter_job): """Load yaml file and look up all commands that need to be run.""" @@ -18,18 +19,16 @@ def _get_commands_to_run(yaml_parsed_info, command_getter_job): def netmiko_send_commands(task: Task, command_getter_job: str): """Run commands specified in PLATFORM_COMMAND_MAP.""" if not task.host.platform: - return Result( - host=task.host, - result=f"{task.host.name} has no platform set.", - failed=True - ) + return Result(host=task.host, result=f"{task.host.name} has no platform set.", failed=True) if task.host.platform not in list(NETMIKO_TO_NAPALM_STATIC.keys()): - return Result( - host=task.host, - result=f"{task.host.name} has a unsupported platform set.", - failed=True - ) + return Result(host=task.host, result=f"{task.host.name} has a unsupported platform set.", failed=True) commands = _get_commands_to_run(task.host.data["platform_parsing_info"], command_getter_job) for command in commands: command_use_textfsm = task.host.data["platform_parsing_info"][command_getter_job]["use_textfsm"] - task.run(task=netmiko_send_command, name=command, command_string=command, use_textfsm=command_use_textfsm, read_timeout=60) + task.run( + task=netmiko_send_command, + name=command, + command_string=command, + use_textfsm=command_use_textfsm, + read_timeout=60, + ) diff --git a/nautobot_device_onboarding/nornir_plays/processor.py b/nautobot_device_onboarding/nornir_plays/processor.py index b215b3df..fe46cd29 100755 --- a/nautobot_device_onboarding/nornir_plays/processor.py +++ b/nautobot_device_onboarding/nornir_plays/processor.py @@ -2,12 +2,13 @@ from typing import Dict -from nautobot_device_onboarding.utils.formatter import extract_show_data from nornir.core.inventory import Host from nornir.core.task import MultiResult, Task from nornir_nautobot.exceptions import NornirNautobotException from 
nornir_nautobot.plugins.processors import BaseLoggingProcessor +from nautobot_device_onboarding.utils.formatter import extract_show_data + class ProcessorDO(BaseLoggingProcessor): """Processor class for Device Onboarding jobs.""" @@ -54,7 +55,7 @@ def task_instance_completed(self, task: Task, host: Host, result: MultiResult) - ) if result.failed: self.logger.warning(f"Task Failed! Result {result.result}.", extra={"object": task.host}) - + self.data[host.name]["failed_reason"] = result.result def subtask_instance_completed(self, task: Task, host: Host, result: MultiResult) -> None: """Processor for Logging on SubTask Completed.""" diff --git a/nautobot_device_onboarding/utils/formatter.py b/nautobot_device_onboarding/utils/formatter.py index 150843be..5d0dc076 100755 --- a/nautobot_device_onboarding/utils/formatter.py +++ b/nautobot_device_onboarding/utils/formatter.py @@ -17,11 +17,10 @@ def load_yaml_datafile(filename): filename (str): Filename within the 'data' directory. """ file_path = os.path.join(DATA_DIR, filename) - if not os.path.isfile(file_path): - raise RuntimeError(f"No data file found at {file_path}") - with open(file_path, "r", encoding="utf-8") as yaml_file: - data = yaml.safe_load(yaml_file) - return data + if os.path.isfile(file_path): + with open(file_path, "r", encoding="utf-8") as yaml_file: + data = yaml.safe_load(yaml_file) + return data def extract_show_data(host, multi_result, command_getter_type): diff --git a/nautobot_device_onboarding/utils/inventory_creator.py b/nautobot_device_onboarding/utils/inventory_creator.py index 740a959f..e297a48d 100755 --- a/nautobot_device_onboarding/utils/inventory_creator.py +++ b/nautobot_device_onboarding/utils/inventory_creator.py @@ -2,11 +2,12 @@ from django.conf import settings from nautobot.extras.choices import SecretsGroupAccessTypeChoices, SecretsGroupSecretTypeChoices -from nautobot_device_onboarding.exceptions import OnboardException -from nautobot_device_onboarding.utils.helper import 
_get_platform_parsing_info from netmiko import SSHDetect from nornir.core.inventory import ConnectionOptions, Host +from nautobot_device_onboarding.exceptions import OnboardException +from nautobot_device_onboarding.utils.helper import _get_platform_parsing_info + def _parse_credentials(credentials): """Parse and return dictionary of credentials.""" From 7bdb449f1b9240a70320c4a2b0555048cd82cb71 Mon Sep 17 00:00:00 2001 From: Jeff Kala Date: Wed, 21 Feb 2024 13:12:04 -0600 Subject: [PATCH 090/225] add junos support to do --- .../command_mappers/juniper_junos.yml | 16 ++++++++-------- nautobot_device_onboarding/utils/formatter.py | 3 ++- 2 files changed, 10 insertions(+), 9 deletions(-) diff --git a/nautobot_device_onboarding/command_mappers/juniper_junos.yml b/nautobot_device_onboarding/command_mappers/juniper_junos.yml index 9f97b7c4..fa6fbee9 100755 --- a/nautobot_device_onboarding/command_mappers/juniper_junos.yml +++ b/nautobot_device_onboarding/command_mappers/juniper_junos.yml @@ -1,18 +1,18 @@ --- device_onboarding: - use_textfsm: true + use_textfsm: false hostname: - command: "show version" - jpath: "[*].hostname" + command: "show version | display json" + jpath: '"software-information"[0]."host-name"[0].data' serial: - command: "show version" - jpath: "[*].serial_number" + command: "show version | display json" + jpath: '"chassis-inventory"[0]."chassis"[0]."serial-number"[0].data' device_type: - command: "show version" - jpath: "[*].model" + command: "show version | display json" + jpath: '"software-information"[0]."product-model"[0].data' mgmt_interface: command: "show interfaces" - jpath: "[?local=='{{ host_info }}'].interface" + jpath: ""interface-information"[0]."logical-interface"[0]."address-family"[0]."interface-address"[0]."ifa-local"[0].data mask_length: command: "show interfaces" jpath: "[?local=='{{ host_info }}'].destination" diff --git a/nautobot_device_onboarding/utils/formatter.py b/nautobot_device_onboarding/utils/formatter.py index 
150843be..9ffa6d31 100755 --- a/nautobot_device_onboarding/utils/formatter.py +++ b/nautobot_device_onboarding/utils/formatter.py @@ -1,5 +1,6 @@ """Formatter.""" +import json import os import yaml @@ -49,7 +50,7 @@ def extract_show_data(host, multi_result, command_getter_type): extracted_value = extract_data_from_json(multi_result[0].result, j2_rendered_jpath) if command_info.get("post_processor"): transform_template = jinja_env.from_string(command_info["post_processor"]) - extracted_processed = transform_template.render(obj=extracted_value) + extracted_processed = json.loads(transform_template.render(obj=extracted_value)) else: if isinstance(extracted_value, list) and len(extracted_value) == 1: extracted_processed = extracted_value[0] From aa341fe06758037e463e696cec7ed6fd74758ed5 Mon Sep 17 00:00:00 2001 From: Jeff Kala Date: Wed, 21 Feb 2024 13:13:55 -0600 Subject: [PATCH 091/225] update junos, add json load to linterfaces --- nautobot_device_onboarding/command_mappers/juniper_junos.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nautobot_device_onboarding/command_mappers/juniper_junos.yml b/nautobot_device_onboarding/command_mappers/juniper_junos.yml index fa6fbee9..89963109 100755 --- a/nautobot_device_onboarding/command_mappers/juniper_junos.yml +++ b/nautobot_device_onboarding/command_mappers/juniper_junos.yml @@ -12,7 +12,7 @@ device_onboarding: jpath: '"software-information"[0]."product-model"[0].data' mgmt_interface: command: "show interfaces" - jpath: ""interface-information"[0]."logical-interface"[0]."address-family"[0]."interface-address"[0]."ifa-local"[0].data + jpath: '"interface-information"[0]."logical-interface"[0]."address-family"[0]."interface-address"[0]."ifa-local"[0].data' mask_length: command: "show interfaces" jpath: "[?local=='{{ host_info }}'].destination" From 9ff1e631ba51e46740abd1214a2116f22c6dd4e0 Mon Sep 17 00:00:00 2001 From: Jeff Kala Date: Wed, 21 Feb 2024 13:22:23 -0600 Subject: [PATCH 092/225] update junos, 
add json load to linterfaces --- nautobot_device_onboarding/command_mappers/cisco_ios.yml | 1 + nautobot_device_onboarding/utils/formatter.py | 3 +-- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/nautobot_device_onboarding/command_mappers/cisco_ios.yml b/nautobot_device_onboarding/command_mappers/cisco_ios.yml index af4b588f..eaade3a3 100755 --- a/nautobot_device_onboarding/command_mappers/cisco_ios.yml +++ b/nautobot_device_onboarding/command_mappers/cisco_ios.yml @@ -25,3 +25,4 @@ network_importer: interfaces: command: "show interfaces" jpath: "[*].[$interface$,link_status,protocol_status,hardware_type,mac_address,ip_address,prefix_length,mtu]" + post_processor: "{{ obj | to_json }}" diff --git a/nautobot_device_onboarding/utils/formatter.py b/nautobot_device_onboarding/utils/formatter.py index 2a26e925..5d0dc076 100755 --- a/nautobot_device_onboarding/utils/formatter.py +++ b/nautobot_device_onboarding/utils/formatter.py @@ -1,6 +1,5 @@ """Formatter.""" -import json import os import yaml @@ -49,7 +48,7 @@ def extract_show_data(host, multi_result, command_getter_type): extracted_value = extract_data_from_json(multi_result[0].result, j2_rendered_jpath) if command_info.get("post_processor"): transform_template = jinja_env.from_string(command_info["post_processor"]) - extracted_processed = json.loads(transform_template.render(obj=extracted_value)) + extracted_processed = transform_template.render(obj=extracted_value) else: if isinstance(extracted_value, list) and len(extracted_value) == 1: extracted_processed = extracted_value[0] From 6cab6a28e31b00043f086fafcf9385bccf985bc3 Mon Sep 17 00:00:00 2001 From: Jeff Kala Date: Wed, 21 Feb 2024 13:24:22 -0600 Subject: [PATCH 093/225] update ios to use tojson filter --- nautobot_device_onboarding/command_mappers/cisco_ios.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nautobot_device_onboarding/command_mappers/cisco_ios.yml b/nautobot_device_onboarding/command_mappers/cisco_ios.yml 
index eaade3a3..a01d9878 100755 --- a/nautobot_device_onboarding/command_mappers/cisco_ios.yml +++ b/nautobot_device_onboarding/command_mappers/cisco_ios.yml @@ -25,4 +25,4 @@ network_importer: interfaces: command: "show interfaces" jpath: "[*].[$interface$,link_status,protocol_status,hardware_type,mac_address,ip_address,prefix_length,mtu]" - post_processor: "{{ obj | to_json }}" + post_processor: "{{ obj | tojson }}" From c65dcbf9fab5db058189308fa22a702711724521 Mon Sep 17 00:00:00 2001 From: Jeff Kala Date: Wed, 21 Feb 2024 13:29:09 -0600 Subject: [PATCH 094/225] update ios to use tojson filter --- nautobot_device_onboarding/command_mappers/cisco_ios.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nautobot_device_onboarding/command_mappers/cisco_ios.yml b/nautobot_device_onboarding/command_mappers/cisco_ios.yml index a01d9878..f5fac7f5 100755 --- a/nautobot_device_onboarding/command_mappers/cisco_ios.yml +++ b/nautobot_device_onboarding/command_mappers/cisco_ios.yml @@ -25,4 +25,4 @@ network_importer: interfaces: command: "show interfaces" jpath: "[*].[$interface$,link_status,protocol_status,hardware_type,mac_address,ip_address,prefix_length,mtu]" - post_processor: "{{ obj | tojson }}" + post_processor: "{{ obj }}" From ce71864b493cd04ecd1ef2e23992273039d33fc1 Mon Sep 17 00:00:00 2001 From: David Cates Date: Wed, 21 Feb 2024 13:15:43 -0700 Subject: [PATCH 095/225] formatting, update function name and doc strings --- .../adapters/network_importer_adapters.py | 18 +++++++++--------- .../diffsync/adapters/onboarding_adapters.py | 6 +++--- .../nornir_plays/processor.py | 7 ++++--- 3 files changed, 16 insertions(+), 15 deletions(-) diff --git a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py index d3a11f4e..1170e124 100644 --- a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py +++ 
b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py @@ -205,14 +205,14 @@ def __init__(self, *args, job, sync=None, **kwargs): "lag_to_interface", ] - def _handle_failed_connections(self, device_data): + def _handle_failed_devices(self, device_data): """ - Handle result data from failed device connections. + Handle result data from failed devices. If a device fails to return expected data, log the result and remove it from the data to be loaded into the diffsync store. """ - failed_device_connections = [] + failed_devices = [] for hostname in device_data: if device_data[hostname].get("failed"): @@ -220,12 +220,12 @@ def _handle_failed_connections(self, device_data): f"{hostname}: Connection or data error, this device will not be synced. " f"{device_data[hostname].get('failed_reason')}" ) - failed_device_connections.append(hostname) - for hostname in failed_device_connections: + failed_devices.append(hostname) + for hostname in failed_devices: del device_data[hostname] - if failed_device_connections: - self.job.logger.warning(f"Failed devices: {failed_device_connections}") - self.device_data = device_data + if failed_devices: + self.job.logger.warning(f"Failed devices: {failed_devices}") + self.job.command_getter_result = device_data self.job.devices_to_load = diffsync_utils.generate_device_querset_from_command_getter_result(device_data) def execute_command_getter(self): @@ -249,7 +249,7 @@ def execute_command_getter(self): if self.job.debug: self.job.logger.debug(f"CommandGetter data type check resut: {data_type_check}") if data_type_check: - self._handle_failed_connections(device_data=result.result) + self._handle_failed_devices(device_data=result.result) else: self.job.logger.error( "Data returned from CommandGetter is not the correct type. 
" diff --git a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py index c8add562..e38d893b 100644 --- a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py @@ -140,9 +140,9 @@ def _validate_ip_addresses(self, ip_addresses): return True raise netaddr.AddrConversionError - def _handle_failed_connections(self, device_data): + def _handle_failed_devices(self, device_data): """ - Handle result data from failed device connections. + Handle result data from failed devices. If a device fails to return expected data, log the result and remove it from the data to be loaded into the diffsync store. @@ -190,7 +190,7 @@ def execute_command_getter(self): if self.job.debug: self.job.logger.debug(f"CommandGetter data type check resut: {data_type_check}") if data_type_check: - self._handle_failed_connections(device_data=result.result) + self._handle_failed_devices(device_data=result.result) else: self.job.logger.error( "Data returned from CommandGetter is not the correct type. " diff --git a/nautobot_device_onboarding/nornir_plays/processor.py b/nautobot_device_onboarding/nornir_plays/processor.py index fe46cd29..d6c96e68 100755 --- a/nautobot_device_onboarding/nornir_plays/processor.py +++ b/nautobot_device_onboarding/nornir_plays/processor.py @@ -19,11 +19,12 @@ def __init__(self, logger, command_outputs): self.data: Dict = command_outputs def task_instance_started(self, task: Task, host: Host) -> None: + """Processor for logging and data processing on task start.""" if not self.data.get(host.name): self.data[host.name] = {} def task_instance_completed(self, task: Task, host: Host, result: MultiResult) -> None: - """Nornir processor task completion for OS upgrades. + """Processor for logging and data processing on task completed. 
Args: task (Task): Nornir task individual object @@ -58,7 +59,7 @@ def task_instance_completed(self, task: Task, host: Host, result: MultiResult) - self.data[host.name]["failed_reason"] = result.result def subtask_instance_completed(self, task: Task, host: Host, result: MultiResult) -> None: - """Processor for Logging on SubTask Completed.""" + """Processor for logging and data processing on subtask completed.""" self.logger.info(f"subtask_instance_completed Subtask completed {task.name}.", extra={"object": task.host}) self.logger.info(f"subtask_instance_completed Subtask result {result.result}.", extra={"object": task.host}) @@ -73,7 +74,7 @@ def subtask_instance_completed(self, task: Task, host: Host, result: MultiResult self.data[host.name][k] = v def subtask_instance_started(self, task: Task, host: Host) -> None: # show command start - """Processor for Logging on SubTask Start.""" + """Processor for logging and data processing on subtask start.""" self.logger.info(f"subtask_instance_started Subtask starting {task.name}.", extra={"object": task.host}) if not self.data.get(host.name): self.data[host.name] = { From 3b1fb5fe810e12539774bce26238b37e550821f1 Mon Sep 17 00:00:00 2001 From: Jeff Kala Date: Wed, 21 Feb 2024 15:36:37 -0600 Subject: [PATCH 096/225] use j2 sandbox and nb core render function --- .../command_mappers/cisco_ios.yml | 5 +- .../command_mappers/cisco_nxos.yml | 4 +- .../command_mappers/juniper_junos.yml | 2 +- nautobot_device_onboarding/utils/formatter.py | 76 +++++++++++++++++-- 4 files changed, 74 insertions(+), 13 deletions(-) diff --git a/nautobot_device_onboarding/command_mappers/cisco_ios.yml b/nautobot_device_onboarding/command_mappers/cisco_ios.yml index f5fac7f5..2634342e 100755 --- a/nautobot_device_onboarding/command_mappers/cisco_ios.yml +++ b/nautobot_device_onboarding/command_mappers/cisco_ios.yml @@ -13,10 +13,10 @@ device_onboarding: jpath: "[*].hardware[0]" mgmt_interface: command: "show interfaces" - jpath: "[?ip_address=='{{ 
host_info }}'].interface" + jpath: "[?ip_address=='{{ obj }}'].interface" mask_length: command: "show interfaces" - jpath: "[?ip_address=='{{ host_info }}'].prefix_length" + jpath: "[?ip_address=='{{ obj }}'].prefix_length" network_importer: use_textfsm: true serial: @@ -25,4 +25,3 @@ network_importer: interfaces: command: "show interfaces" jpath: "[*].[$interface$,link_status,protocol_status,hardware_type,mac_address,ip_address,prefix_length,mtu]" - post_processor: "{{ obj }}" diff --git a/nautobot_device_onboarding/command_mappers/cisco_nxos.yml b/nautobot_device_onboarding/command_mappers/cisco_nxos.yml index 8074c1c9..17ed88a8 100755 --- a/nautobot_device_onboarding/command_mappers/cisco_nxos.yml +++ b/nautobot_device_onboarding/command_mappers/cisco_nxos.yml @@ -12,7 +12,7 @@ device_onboarding: jpath: "[*].platform" mgmt_interface: command: "show interface" - jpath: "[?ip_address=='{{ host_info }}'].interface || [`mgmt0`]" + jpath: "[?ip_address=='{{ obj }}'].interface || [`mgmt0`]" mask_length: command: "show interface" - jpath: "[?ip_address=='{{ host_info }}'].prefix_length || [`31`]" + jpath: "[?ip_address=='{{ obj }}'].prefix_length || [`31`]" diff --git a/nautobot_device_onboarding/command_mappers/juniper_junos.yml b/nautobot_device_onboarding/command_mappers/juniper_junos.yml index 89963109..2c79f54b 100755 --- a/nautobot_device_onboarding/command_mappers/juniper_junos.yml +++ b/nautobot_device_onboarding/command_mappers/juniper_junos.yml @@ -15,5 +15,5 @@ device_onboarding: jpath: '"interface-information"[0]."logical-interface"[0]."address-family"[0]."interface-address"[0]."ifa-local"[0].data' mask_length: command: "show interfaces" - jpath: "[?local=='{{ host_info }}'].destination" + jpath: "[?local=='{{ obj }}'].destination" post_processor: "{{ obj[0] | ipaddress_interface('netmask') | netmask_to_cidr }}" diff --git a/nautobot_device_onboarding/utils/formatter.py b/nautobot_device_onboarding/utils/formatter.py index 5d0dc076..5e19f193 100755 --- 
a/nautobot_device_onboarding/utils/formatter.py +++ b/nautobot_device_onboarding/utils/formatter.py @@ -4,12 +4,76 @@ import yaml from django.template import engines +from django.utils.module_loading import import_string from jdiff import extract_data_from_json -from jinja2 import Environment +from jinja2 import exceptions as jinja_errors +from jinja2.sandbox import SandboxedEnvironment +from nautobot.core.utils.data import render_jinja2 +from nautobot_device_onboarding.exceptions import OnboardException DATA_DIR = os.path.abspath(os.path.join(os.path.dirname(os.path.dirname(__file__)), "command_mappers")) +def get_django_env(): + """Load Django Jinja filters from the Django jinja template engine, and add them to the jinja_env. + + Returns: + SandboxedEnvironment + """ + # Use a custom Jinja2 environment instead of Django's to avoid HTML escaping + j2_env = { + "undefined": "jinja2.StrictUndefined", + "trim_blocks": True, + "lstrip_blocks": False, + } + if isinstance(j2_env["undefined"], str): + j2_env["undefined"] = import_string(j2_env["undefined"]) + jinja_env = SandboxedEnvironment(**j2_env) + jinja_env.filters = engines["jinja"].env.filters + return jinja_env + + +def render_jinja_template(obj, template): + """ + Helper function to render Jinja templates. + + Args: + obj (Device): The Device object from Nautobot. + template (str): A Jinja2 template to be rendered. + + Returns: + str: The ``template`` rendered. + + Raises: + NornirNautobotException: When there is an error rendering the ``template``. 
+ """ + try: + return render_jinja2(template_code=template, context={"obj": obj}) + except jinja_errors.UndefinedError as error: + error_msg = ( + "`E3019:` Jinja encountered and UndefinedError`, check the template for missing variable definitions.\n" + f"Template:\n{template}\n" + f"Original Error: {error}" + ) + raise OnboardException(error_msg) from error + + except jinja_errors.TemplateSyntaxError as error: # Also catches subclass of TemplateAssertionError + error_msg = ( + f"`E3020:` Jinja encountered a SyntaxError at line number {error.lineno}," + f"check the template for invalid Jinja syntax.\nTemplate:\n{template}\n" + f"Original Error: {error}" + ) + raise OnboardException(error_msg) from error + # Intentionally not catching TemplateNotFound errors since template is passes as a string and not a filename + except jinja_errors.TemplateError as error: # Catches all remaining Jinja errors + error_msg = ( + "`E3021:` Jinja encountered an unexpected TemplateError; check the template for correctness\n" + f"Template:\n{template}\n" + f"Original Error: {error}" + ) + raise OnboardException(error_msg) from error + + def load_yaml_datafile(filename): """Get the contents of the given YAML data file. @@ -31,8 +95,7 @@ def extract_show_data(host, multi_result, command_getter_type): multi_result (multiResult): multiresult object from nornir command_getter_type (str): to know what dict to pull, device_onboarding or network_importer. 
""" - jinja_env = Environment(autoescape=True, trim_blocks=True, lstrip_blocks=False) - jinja_env.filters = engines["jinja"].env.filters + get_django_env() host_platform = host.platform if host_platform == "cisco_xe": @@ -43,13 +106,12 @@ def extract_show_data(host, multi_result, command_getter_type): for default_dict_field, command_info in command_jpaths[command_getter_type].items(): if not default_dict_field == "use_textfsm": if command_info["command"] == multi_result[0].name: - j2_rendered_jpath_template = jinja_env.from_string(command_info["jpath"]) - j2_rendered_jpath = j2_rendered_jpath_template.render(host_info=host.name) + j2_rendered_jpath = render_jinja_template(obj=host.name, template=command_info["jpath"]) extracted_value = extract_data_from_json(multi_result[0].result, j2_rendered_jpath) if command_info.get("post_processor"): - transform_template = jinja_env.from_string(command_info["post_processor"]) - extracted_processed = transform_template.render(obj=extracted_value) + extracted_processed = render_jinja_template(obj=extracted_value, template=command_info["post_processor"]) else: + extracted_processed = extracted_value if isinstance(extracted_value, list) and len(extracted_value) == 1: extracted_processed = extracted_value[0] result_dict[default_dict_field] = extracted_processed From f3e2c8d1bb61221f85af8914ef825b11c9dbb4e3 Mon Sep 17 00:00:00 2001 From: David Cates Date: Wed, 21 Feb 2024 14:40:05 -0700 Subject: [PATCH 097/225] poetry lock and black --- poetry.lock | 319 +++++++++++++++++++++++++++------------------------- tasks.py | 8 +- 2 files changed, 169 insertions(+), 158 deletions(-) diff --git a/poetry.lock b/poetry.lock index df1d2cea..d8276873 100755 --- a/poetry.lock +++ b/poetry.lock @@ -27,13 +27,13 @@ files = [ [[package]] name = "anyio" -version = "4.2.0" +version = "4.3.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = false python-versions = ">=3.8" files = [ - {file = 
"anyio-4.2.0-py3-none-any.whl", hash = "sha256:745843b39e829e108e518c489b31dc757de7d2131d53fac32bd8df268227bfee"}, - {file = "anyio-4.2.0.tar.gz", hash = "sha256:e1875bb4b4e2de1669f4bc7869b6d3f54231cdced71605e6e64c9be77e3be50f"}, + {file = "anyio-4.3.0-py3-none-any.whl", hash = "sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8"}, + {file = "anyio-4.3.0.tar.gz", hash = "sha256:f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6"}, ] [package.dependencies] @@ -47,6 +47,17 @@ doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphin test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] trio = ["trio (>=0.23)"] +[[package]] +name = "appnope" +version = "0.1.4" +description = "Disable App Nap on macOS >= 10.9" +optional = false +python-versions = ">=3.6" +files = [ + {file = "appnope-0.1.4-py2.py3-none-any.whl", hash = "sha256:502575ee11cd7a28c0205f379b525beefebab9d161b7c964670864014ed7213c"}, + {file = "appnope-0.1.4.tar.gz", hash = "sha256:1de3860566df9caf38f01f86f65e0e13e379af54f9e4bee1e66b48f2efffd1ee"}, +] + [[package]] name = "asgiref" version = "3.7.2" @@ -631,63 +642,63 @@ files = [ [[package]] name = "coverage" -version = "7.4.1" +version = "7.4.2" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.8" files = [ - {file = "coverage-7.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:077d366e724f24fc02dbfe9d946534357fda71af9764ff99d73c3c596001bbd7"}, - {file = "coverage-7.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0193657651f5399d433c92f8ae264aff31fc1d066deee4b831549526433f3f61"}, - {file = "coverage-7.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d17bbc946f52ca67adf72a5ee783cd7cd3477f8f8796f59b4974a9b59cacc9ee"}, - {file = 
"coverage-7.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3277f5fa7483c927fe3a7b017b39351610265308f5267ac6d4c2b64cc1d8d25"}, - {file = "coverage-7.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6dceb61d40cbfcf45f51e59933c784a50846dc03211054bd76b421a713dcdf19"}, - {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6008adeca04a445ea6ef31b2cbaf1d01d02986047606f7da266629afee982630"}, - {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c61f66d93d712f6e03369b6a7769233bfda880b12f417eefdd4f16d1deb2fc4c"}, - {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b9bb62fac84d5f2ff523304e59e5c439955fb3b7f44e3d7b2085184db74d733b"}, - {file = "coverage-7.4.1-cp310-cp310-win32.whl", hash = "sha256:f86f368e1c7ce897bf2457b9eb61169a44e2ef797099fb5728482b8d69f3f016"}, - {file = "coverage-7.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:869b5046d41abfea3e381dd143407b0d29b8282a904a19cb908fa24d090cc018"}, - {file = "coverage-7.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b8ffb498a83d7e0305968289441914154fb0ef5d8b3157df02a90c6695978295"}, - {file = "coverage-7.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3cacfaefe6089d477264001f90f55b7881ba615953414999c46cc9713ff93c8c"}, - {file = "coverage-7.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d6850e6e36e332d5511a48a251790ddc545e16e8beaf046c03985c69ccb2676"}, - {file = "coverage-7.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18e961aa13b6d47f758cc5879383d27b5b3f3dcd9ce8cdbfdc2571fe86feb4dd"}, - {file = "coverage-7.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dfd1e1b9f0898817babf840b77ce9fe655ecbe8b1b327983df485b30df8cc011"}, - {file = 
"coverage-7.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6b00e21f86598b6330f0019b40fb397e705135040dbedc2ca9a93c7441178e74"}, - {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:536d609c6963c50055bab766d9951b6c394759190d03311f3e9fcf194ca909e1"}, - {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7ac8f8eb153724f84885a1374999b7e45734bf93a87d8df1e7ce2146860edef6"}, - {file = "coverage-7.4.1-cp311-cp311-win32.whl", hash = "sha256:f3771b23bb3675a06f5d885c3630b1d01ea6cac9e84a01aaf5508706dba546c5"}, - {file = "coverage-7.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:9d2f9d4cc2a53b38cabc2d6d80f7f9b7e3da26b2f53d48f05876fef7956b6968"}, - {file = "coverage-7.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f68ef3660677e6624c8cace943e4765545f8191313a07288a53d3da188bd8581"}, - {file = "coverage-7.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:23b27b8a698e749b61809fb637eb98ebf0e505710ec46a8aa6f1be7dc0dc43a6"}, - {file = "coverage-7.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e3424c554391dc9ef4a92ad28665756566a28fecf47308f91841f6c49288e66"}, - {file = "coverage-7.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e0860a348bf7004c812c8368d1fc7f77fe8e4c095d661a579196a9533778e156"}, - {file = "coverage-7.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe558371c1bdf3b8fa03e097c523fb9645b8730399c14fe7721ee9c9e2a545d3"}, - {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3468cc8720402af37b6c6e7e2a9cdb9f6c16c728638a2ebc768ba1ef6f26c3a1"}, - {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:02f2edb575d62172aa28fe00efe821ae31f25dc3d589055b3fb64d51e52e4ab1"}, - {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:ca6e61dc52f601d1d224526360cdeab0d0712ec104a2ce6cc5ccef6ed9a233bc"}, - {file = "coverage-7.4.1-cp312-cp312-win32.whl", hash = "sha256:ca7b26a5e456a843b9b6683eada193fc1f65c761b3a473941efe5a291f604c74"}, - {file = "coverage-7.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:85ccc5fa54c2ed64bd91ed3b4a627b9cce04646a659512a051fa82a92c04a448"}, - {file = "coverage-7.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8bdb0285a0202888d19ec6b6d23d5990410decb932b709f2b0dfe216d031d218"}, - {file = "coverage-7.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:918440dea04521f499721c039863ef95433314b1db00ff826a02580c1f503e45"}, - {file = "coverage-7.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:379d4c7abad5afbe9d88cc31ea8ca262296480a86af945b08214eb1a556a3e4d"}, - {file = "coverage-7.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b094116f0b6155e36a304ff912f89bbb5067157aff5f94060ff20bbabdc8da06"}, - {file = "coverage-7.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2f5968608b1fe2a1d00d01ad1017ee27efd99b3437e08b83ded9b7af3f6f766"}, - {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:10e88e7f41e6197ea0429ae18f21ff521d4f4490aa33048f6c6f94c6045a6a75"}, - {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a4a3907011d39dbc3e37bdc5df0a8c93853c369039b59efa33a7b6669de04c60"}, - {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6d224f0c4c9c98290a6990259073f496fcec1b5cc613eecbd22786d398ded3ad"}, - {file = "coverage-7.4.1-cp38-cp38-win32.whl", hash = "sha256:23f5881362dcb0e1a92b84b3c2809bdc90db892332daab81ad8f642d8ed55042"}, - {file = "coverage-7.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:a07f61fc452c43cd5328b392e52555f7d1952400a1ad09086c4a8addccbd138d"}, - {file = "coverage-7.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:8e738a492b6221f8dcf281b67129510835461132b03024830ac0e554311a5c54"}, - {file = "coverage-7.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:46342fed0fff72efcda77040b14728049200cbba1279e0bf1188f1f2078c1d70"}, - {file = "coverage-7.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9641e21670c68c7e57d2053ddf6c443e4f0a6e18e547e86af3fad0795414a628"}, - {file = "coverage-7.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aeb2c2688ed93b027eb0d26aa188ada34acb22dceea256d76390eea135083950"}, - {file = "coverage-7.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d12c923757de24e4e2110cf8832d83a886a4cf215c6e61ed506006872b43a6d1"}, - {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0491275c3b9971cdbd28a4595c2cb5838f08036bca31765bad5e17edf900b2c7"}, - {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8dfc5e195bbef80aabd81596ef52a1277ee7143fe419efc3c4d8ba2754671756"}, - {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1a78b656a4d12b0490ca72651fe4d9f5e07e3c6461063a9b6265ee45eb2bdd35"}, - {file = "coverage-7.4.1-cp39-cp39-win32.whl", hash = "sha256:f90515974b39f4dea2f27c0959688621b46d96d5a626cf9c53dbc653a895c05c"}, - {file = "coverage-7.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:64e723ca82a84053dd7bfcc986bdb34af8d9da83c521c19d6b472bc6880e191a"}, - {file = "coverage-7.4.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:32a8d985462e37cfdab611a6f95b09d7c091d07668fdc26e47a725ee575fe166"}, - {file = "coverage-7.4.1.tar.gz", hash = "sha256:1ed4b95480952b1a26d863e546fa5094564aa0065e1e5f0d4d0041f293251d04"}, + {file = "coverage-7.4.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bf54c3e089179d9d23900e3efc86d46e4431188d9a657f345410eecdd0151f50"}, + {file = "coverage-7.4.2-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:fe6e43c8b510719b48af7db9631b5fbac910ade4bd90e6378c85ac5ac706382c"}, + {file = "coverage-7.4.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b98c89db1b150d851a7840142d60d01d07677a18f0f46836e691c38134ed18b"}, + {file = "coverage-7.4.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c5f9683be6a5b19cd776ee4e2f2ffb411424819c69afab6b2db3a0a364ec6642"}, + {file = "coverage-7.4.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78cdcbf7b9cb83fe047ee09298e25b1cd1636824067166dc97ad0543b079d22f"}, + {file = "coverage-7.4.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:2599972b21911111114100d362aea9e70a88b258400672626efa2b9e2179609c"}, + {file = "coverage-7.4.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ef00d31b7569ed3cb2036f26565f1984b9fc08541731ce01012b02a4c238bf03"}, + {file = "coverage-7.4.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:20a875bfd8c282985c4720c32aa05056f77a68e6d8bbc5fe8632c5860ee0b49b"}, + {file = "coverage-7.4.2-cp310-cp310-win32.whl", hash = "sha256:b3f2b1eb229f23c82898eedfc3296137cf1f16bb145ceab3edfd17cbde273fb7"}, + {file = "coverage-7.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:7df95fdd1432a5d2675ce630fef5f239939e2b3610fe2f2b5bf21fa505256fa3"}, + {file = "coverage-7.4.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a8ddbd158e069dded57738ea69b9744525181e99974c899b39f75b2b29a624e2"}, + {file = "coverage-7.4.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:81a5fb41b0d24447a47543b749adc34d45a2cf77b48ca74e5bf3de60a7bd9edc"}, + {file = "coverage-7.4.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2412e98e70f16243be41d20836abd5f3f32edef07cbf8f407f1b6e1ceae783ac"}, + {file = "coverage-7.4.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:ddb79414c15c6f03f56cc68fa06994f047cf20207c31b5dad3f6bab54a0f66ef"}, + {file = "coverage-7.4.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf89ab85027427d351f1de918aff4b43f4eb5f33aff6835ed30322a86ac29c9e"}, + {file = "coverage-7.4.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a178b7b1ac0f1530bb28d2e51f88c0bab3e5949835851a60dda80bff6052510c"}, + {file = "coverage-7.4.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:06fe398145a2e91edaf1ab4eee66149c6776c6b25b136f4a86fcbbb09512fd10"}, + {file = "coverage-7.4.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:18cac867950943fe93d6cd56a67eb7dcd2d4a781a40f4c1e25d6f1ed98721a55"}, + {file = "coverage-7.4.2-cp311-cp311-win32.whl", hash = "sha256:f72cdd2586f9a769570d4b5714a3837b3a59a53b096bb954f1811f6a0afad305"}, + {file = "coverage-7.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:d779a48fac416387dd5673fc5b2d6bd903ed903faaa3247dc1865c65eaa5a93e"}, + {file = "coverage-7.4.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:adbdfcda2469d188d79771d5696dc54fab98a16d2ef7e0875013b5f56a251047"}, + {file = "coverage-7.4.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ac4bab32f396b03ebecfcf2971668da9275b3bb5f81b3b6ba96622f4ef3f6e17"}, + {file = "coverage-7.4.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:006d220ba2e1a45f1de083d5022d4955abb0aedd78904cd5a779b955b019ec73"}, + {file = "coverage-7.4.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3733545eb294e5ad274abe131d1e7e7de4ba17a144505c12feca48803fea5f64"}, + {file = "coverage-7.4.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42a9e754aa250fe61f0f99986399cec086d7e7a01dd82fd863a20af34cbce962"}, + {file = "coverage-7.4.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = 
"sha256:2ed37e16cf35c8d6e0b430254574b8edd242a367a1b1531bd1adc99c6a5e00fe"}, + {file = "coverage-7.4.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:b953275d4edfab6cc0ed7139fa773dfb89e81fee1569a932f6020ce7c6da0e8f"}, + {file = "coverage-7.4.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:32b4ab7e6c924f945cbae5392832e93e4ceb81483fd6dc4aa8fb1a97b9d3e0e1"}, + {file = "coverage-7.4.2-cp312-cp312-win32.whl", hash = "sha256:f5df76c58977bc35a49515b2fbba84a1d952ff0ec784a4070334dfbec28a2def"}, + {file = "coverage-7.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:34423abbaad70fea9d0164add189eabaea679068ebdf693baa5c02d03e7db244"}, + {file = "coverage-7.4.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5b11f9c6587668e495cc7365f85c93bed34c3a81f9f08b0920b87a89acc13469"}, + {file = "coverage-7.4.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:51593a1f05c39332f623d64d910445fdec3d2ac2d96b37ce7f331882d5678ddf"}, + {file = "coverage-7.4.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69f1665165ba2fe7614e2f0c1aed71e14d83510bf67e2ee13df467d1c08bf1e8"}, + {file = "coverage-7.4.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3c8bbb95a699c80a167478478efe5e09ad31680931ec280bf2087905e3b95ec"}, + {file = "coverage-7.4.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:175f56572f25e1e1201d2b3e07b71ca4d201bf0b9cb8fad3f1dfae6a4188de86"}, + {file = "coverage-7.4.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8562ca91e8c40864942615b1d0b12289d3e745e6b2da901d133f52f2d510a1e3"}, + {file = "coverage-7.4.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d9a1ef0f173e1a19738f154fb3644f90d0ada56fe6c9b422f992b04266c55d5a"}, + {file = "coverage-7.4.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f40ac873045db4fd98a6f40387d242bde2708a3f8167bd967ccd43ad46394ba2"}, + {file = "coverage-7.4.2-cp38-cp38-win32.whl", hash = 
"sha256:d1b750a8409bec61caa7824bfd64a8074b6d2d420433f64c161a8335796c7c6b"}, + {file = "coverage-7.4.2-cp38-cp38-win_amd64.whl", hash = "sha256:b4ae777bebaed89e3a7e80c4a03fac434a98a8abb5251b2a957d38fe3fd30088"}, + {file = "coverage-7.4.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3ff7f92ae5a456101ca8f48387fd3c56eb96353588e686286f50633a611afc95"}, + {file = "coverage-7.4.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:861d75402269ffda0b33af94694b8e0703563116b04c681b1832903fac8fd647"}, + {file = "coverage-7.4.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3507427d83fa961cbd73f11140f4a5ce84208d31756f7238d6257b2d3d868405"}, + {file = "coverage-7.4.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bf711d517e21fb5bc429f5c4308fbc430a8585ff2a43e88540264ae87871e36a"}, + {file = "coverage-7.4.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c00e54f0bd258ab25e7f731ca1d5144b0bf7bec0051abccd2bdcff65fa3262c9"}, + {file = "coverage-7.4.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f8e845d894e39fb53834da826078f6dc1a933b32b1478cf437007367efaf6f6a"}, + {file = "coverage-7.4.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:840456cb1067dc350af9080298c7c2cfdddcedc1cb1e0b30dceecdaf7be1a2d3"}, + {file = "coverage-7.4.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c11ca2df2206a4e3e4c4567f52594637392ed05d7c7fb73b4ea1c658ba560265"}, + {file = "coverage-7.4.2-cp39-cp39-win32.whl", hash = "sha256:3ff5bdb08d8938d336ce4088ca1a1e4b6c8cd3bef8bb3a4c0eb2f37406e49643"}, + {file = "coverage-7.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:ac9e95cefcf044c98d4e2c829cd0669918585755dd9a92e28a1a7012322d0a95"}, + {file = "coverage-7.4.2-pp38.pp39.pp310-none-any.whl", hash = "sha256:f593a4a90118d99014517c2679e04a4ef5aee2d81aa05c26c734d271065efcb6"}, + {file = "coverage-7.4.2.tar.gz", hash = 
"sha256:1a5ee18e3a8d766075ce9314ed1cb695414bae67df6a4b0805f5137d93d6f1cb"}, ] [package.extras] @@ -709,43 +720,43 @@ dev = ["polib"] [[package]] name = "cryptography" -version = "42.0.2" +version = "42.0.4" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." optional = false python-versions = ">=3.7" files = [ - {file = "cryptography-42.0.2-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:701171f825dcab90969596ce2af253143b93b08f1a716d4b2a9d2db5084ef7be"}, - {file = "cryptography-42.0.2-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:61321672b3ac7aade25c40449ccedbc6db72c7f5f0fdf34def5e2f8b51ca530d"}, - {file = "cryptography-42.0.2-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea2c3ffb662fec8bbbfce5602e2c159ff097a4631d96235fcf0fb00e59e3ece4"}, - {file = "cryptography-42.0.2-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b15c678f27d66d247132cbf13df2f75255627bcc9b6a570f7d2fd08e8c081d2"}, - {file = "cryptography-42.0.2-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:8e88bb9eafbf6a4014d55fb222e7360eef53e613215085e65a13290577394529"}, - {file = "cryptography-42.0.2-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:a047682d324ba56e61b7ea7c7299d51e61fd3bca7dad2ccc39b72bd0118d60a1"}, - {file = "cryptography-42.0.2-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:36d4b7c4be6411f58f60d9ce555a73df8406d484ba12a63549c88bd64f7967f1"}, - {file = "cryptography-42.0.2-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:a00aee5d1b6c20620161984f8ab2ab69134466c51f58c052c11b076715e72929"}, - {file = "cryptography-42.0.2-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:b97fe7d7991c25e6a31e5d5e795986b18fbbb3107b873d5f3ae6dc9a103278e9"}, - {file = "cryptography-42.0.2-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:5fa82a26f92871eca593b53359c12ad7949772462f887c35edaf36f87953c0e2"}, - {file = "cryptography-42.0.2-cp37-abi3-win32.whl", hash = 
"sha256:4b063d3413f853e056161eb0c7724822a9740ad3caa24b8424d776cebf98e7ee"}, - {file = "cryptography-42.0.2-cp37-abi3-win_amd64.whl", hash = "sha256:841ec8af7a8491ac76ec5a9522226e287187a3107e12b7d686ad354bb78facee"}, - {file = "cryptography-42.0.2-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:55d1580e2d7e17f45d19d3b12098e352f3a37fe86d380bf45846ef257054b242"}, - {file = "cryptography-42.0.2-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28cb2c41f131a5758d6ba6a0504150d644054fd9f3203a1e8e8d7ac3aea7f73a"}, - {file = "cryptography-42.0.2-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9097a208875fc7bbeb1286d0125d90bdfed961f61f214d3f5be62cd4ed8a446"}, - {file = "cryptography-42.0.2-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:44c95c0e96b3cb628e8452ec060413a49002a247b2b9938989e23a2c8291fc90"}, - {file = "cryptography-42.0.2-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:2f9f14185962e6a04ab32d1abe34eae8a9001569ee4edb64d2304bf0d65c53f3"}, - {file = "cryptography-42.0.2-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:09a77e5b2e8ca732a19a90c5bca2d124621a1edb5438c5daa2d2738bfeb02589"}, - {file = "cryptography-42.0.2-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:ad28cff53f60d99a928dfcf1e861e0b2ceb2bc1f08a074fdd601b314e1cc9e0a"}, - {file = "cryptography-42.0.2-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:130c0f77022b2b9c99d8cebcdd834d81705f61c68e91ddd614ce74c657f8b3ea"}, - {file = "cryptography-42.0.2-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:fa3dec4ba8fb6e662770b74f62f1a0c7d4e37e25b58b2bf2c1be4c95372b4a33"}, - {file = "cryptography-42.0.2-cp39-abi3-win32.whl", hash = "sha256:3dbd37e14ce795b4af61b89b037d4bc157f2cb23e676fa16932185a04dfbf635"}, - {file = "cryptography-42.0.2-cp39-abi3-win_amd64.whl", hash = "sha256:8a06641fb07d4e8f6c7dda4fc3f8871d327803ab6542e33831c7ccfdcb4d0ad6"}, - {file = "cryptography-42.0.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = 
"sha256:087887e55e0b9c8724cf05361357875adb5c20dec27e5816b653492980d20380"}, - {file = "cryptography-42.0.2-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:a7ef8dd0bf2e1d0a27042b231a3baac6883cdd5557036f5e8df7139255feaac6"}, - {file = "cryptography-42.0.2-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:4383b47f45b14459cab66048d384614019965ba6c1a1a141f11b5a551cace1b2"}, - {file = "cryptography-42.0.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:fbeb725c9dc799a574518109336acccaf1303c30d45c075c665c0793c2f79a7f"}, - {file = "cryptography-42.0.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:320948ab49883557a256eab46149df79435a22d2fefd6a66fe6946f1b9d9d008"}, - {file = "cryptography-42.0.2-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:5ef9bc3d046ce83c4bbf4c25e1e0547b9c441c01d30922d812e887dc5f125c12"}, - {file = "cryptography-42.0.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:52ed9ebf8ac602385126c9a2fe951db36f2cb0c2538d22971487f89d0de4065a"}, - {file = "cryptography-42.0.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:141e2aa5ba100d3788c0ad7919b288f89d1fe015878b9659b307c9ef867d3a65"}, - {file = "cryptography-42.0.2.tar.gz", hash = "sha256:e0ec52ba3c7f1b7d813cd52649a5b3ef1fc0d433219dc8c93827c57eab6cf888"}, + {file = "cryptography-42.0.4-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:ffc73996c4fca3d2b6c1c8c12bfd3ad00def8621da24f547626bf06441400449"}, + {file = "cryptography-42.0.4-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:db4b65b02f59035037fde0998974d84244a64c3265bdef32a827ab9b63d61b18"}, + {file = "cryptography-42.0.4-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad9c385ba8ee025bb0d856714f71d7840020fe176ae0229de618f14dae7a6e2"}, + {file = "cryptography-42.0.4-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:69b22ab6506a3fe483d67d1ed878e1602bdd5912a134e6202c1ec672233241c1"}, + {file = 
"cryptography-42.0.4-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:e09469a2cec88fb7b078e16d4adec594414397e8879a4341c6ace96013463d5b"}, + {file = "cryptography-42.0.4-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3e970a2119507d0b104f0a8e281521ad28fc26f2820687b3436b8c9a5fcf20d1"}, + {file = "cryptography-42.0.4-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:e53dc41cda40b248ebc40b83b31516487f7db95ab8ceac1f042626bc43a2f992"}, + {file = "cryptography-42.0.4-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:c3a5cbc620e1e17009f30dd34cb0d85c987afd21c41a74352d1719be33380885"}, + {file = "cryptography-42.0.4-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:6bfadd884e7280df24d26f2186e4e07556a05d37393b0f220a840b083dc6a824"}, + {file = "cryptography-42.0.4-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:01911714117642a3f1792c7f376db572aadadbafcd8d75bb527166009c9f1d1b"}, + {file = "cryptography-42.0.4-cp37-abi3-win32.whl", hash = "sha256:fb0cef872d8193e487fc6bdb08559c3aa41b659a7d9be48b2e10747f47863925"}, + {file = "cryptography-42.0.4-cp37-abi3-win_amd64.whl", hash = "sha256:c1f25b252d2c87088abc8bbc4f1ecbf7c919e05508a7e8628e6875c40bc70923"}, + {file = "cryptography-42.0.4-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:15a1fb843c48b4a604663fa30af60818cd28f895572386e5f9b8a665874c26e7"}, + {file = "cryptography-42.0.4-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1327f280c824ff7885bdeef8578f74690e9079267c1c8bd7dc5cc5aa065ae52"}, + {file = "cryptography-42.0.4-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ffb03d419edcab93b4b19c22ee80c007fb2d708429cecebf1dd3258956a563a"}, + {file = "cryptography-42.0.4-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:1df6fcbf60560d2113b5ed90f072dc0b108d64750d4cbd46a21ec882c7aefce9"}, + {file = "cryptography-42.0.4-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:44a64043f743485925d3bcac548d05df0f9bb445c5fcca6681889c7c3ab12764"}, + {file = 
"cryptography-42.0.4-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:3c6048f217533d89f2f8f4f0fe3044bf0b2090453b7b73d0b77db47b80af8dff"}, + {file = "cryptography-42.0.4-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:6d0fbe73728c44ca3a241eff9aefe6496ab2656d6e7a4ea2459865f2e8613257"}, + {file = "cryptography-42.0.4-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:887623fe0d70f48ab3f5e4dbf234986b1329a64c066d719432d0698522749929"}, + {file = "cryptography-42.0.4-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:ce8613beaffc7c14f091497346ef117c1798c202b01153a8cc7b8e2ebaaf41c0"}, + {file = "cryptography-42.0.4-cp39-abi3-win32.whl", hash = "sha256:810bcf151caefc03e51a3d61e53335cd5c7316c0a105cc695f0959f2c638b129"}, + {file = "cryptography-42.0.4-cp39-abi3-win_amd64.whl", hash = "sha256:a0298bdc6e98ca21382afe914c642620370ce0470a01e1bef6dd9b5354c36854"}, + {file = "cryptography-42.0.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5f8907fcf57392cd917892ae83708761c6ff3c37a8e835d7246ff0ad251d9298"}, + {file = "cryptography-42.0.4-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:12d341bd42cdb7d4937b0cabbdf2a94f949413ac4504904d0cdbdce4a22cbf88"}, + {file = "cryptography-42.0.4-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1cdcdbd117681c88d717437ada72bdd5be9de117f96e3f4d50dab3f59fd9ab20"}, + {file = "cryptography-42.0.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:0e89f7b84f421c56e7ff69f11c441ebda73b8a8e6488d322ef71746224c20fce"}, + {file = "cryptography-42.0.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f1e85a178384bf19e36779d91ff35c7617c885da487d689b05c1366f9933ad74"}, + {file = "cryptography-42.0.4-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:d2a27aca5597c8a71abbe10209184e1a8e91c1fd470b5070a2ea60cafec35bcd"}, + {file = "cryptography-42.0.4-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:4e36685cb634af55e0677d435d425043967ac2f3790ec652b2b88ad03b85c27b"}, + {file = 
"cryptography-42.0.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:f47be41843200f7faec0683ad751e5ef11b9a56a220d57f300376cd8aba81660"}, + {file = "cryptography-42.0.4.tar.gz", hash = "sha256:831a4b37accef30cccd34fcb916a5d7b5be3cbbe27268a02832c3e450aea39cb"}, ] [package.dependencies] @@ -761,6 +772,17 @@ ssh = ["bcrypt (>=3.1.5)"] test = ["certifi", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] test-randomorder = ["pytest-randomly"] +[[package]] +name = "decorator" +version = "5.1.1" +description = "Decorators for Humans" +optional = false +python-versions = ">=3.5" +files = [ + {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, + {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, +] + [[package]] name = "deepdiff" version = "6.7.1" @@ -1256,6 +1278,20 @@ files = [ [package.extras] test = ["pytest (>=6)"] +[[package]] +name = "executing" +version = "2.0.1" +description = "Get the currently executing AST node of a frame, and other information" +optional = false +python-versions = ">=3.5" +files = [ + {file = "executing-2.0.1-py2.py3-none-any.whl", hash = "sha256:eac49ca94516ccc753f9fb5ce82603156e590b27525a8bc32cce8ae302eb61bc"}, + {file = "executing-2.0.1.tar.gz", hash = "sha256:35afe2ce3affba8ee97f2d69927fa823b08b472b7b994e36a52a964b93d16147"}, +] + +[package.extras] +tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipython", "littleutils", "pytest", "rich"] + [[package]] name = "flake8" version = "5.0.4" @@ -1274,12 +1310,13 @@ pyflakes = ">=2.5.0,<2.6.0" [[package]] name = "future" -version = "0.18.3" +version = "1.0.0" description = "Clean single-source support for Python 3 and 2" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" files = [ - {file = "future-0.18.3.tar.gz", hash = 
"sha256:34a17436ed1e96697a86f9de3d15a3b0be01d8bc8de9c1dffd59fb8234ed5307"}, + {file = "future-1.0.0-py3-none-any.whl", hash = "sha256:929292d34f5872e70396626ef385ec22355a1fae8ad29e1a734c3e43f9fbc216"}, + {file = "future-1.0.0.tar.gz", hash = "sha256:bd2968309307861edae1458a4f8a4f3598c03be43b97521076aebf5d94c07b05"}, ] [[package]] @@ -1315,20 +1352,20 @@ smmap = ">=3.0.1,<6" [[package]] name = "gitpython" -version = "3.1.41" +version = "3.1.42" description = "GitPython is a Python library used to interact with Git repositories" optional = false python-versions = ">=3.7" files = [ - {file = "GitPython-3.1.41-py3-none-any.whl", hash = "sha256:c36b6634d069b3f719610175020a9aed919421c87552185b085e04fbbdb10b7c"}, - {file = "GitPython-3.1.41.tar.gz", hash = "sha256:ed66e624884f76df22c8e16066d567aaa5a37d5b5fa19db2c6df6f7156db9048"}, + {file = "GitPython-3.1.42-py3-none-any.whl", hash = "sha256:1bf9cd7c9e7255f77778ea54359e54ac22a72a5b51288c457c881057b7bb9ecd"}, + {file = "GitPython-3.1.42.tar.gz", hash = "sha256:2d99869e0fef71a73cbd242528105af1d6c1b108c60dfabd994bf292f76c3ceb"}, ] [package.dependencies] gitdb = ">=4.0.1,<5" [package.extras] -test = ["black", "coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mock", "mypy", "pre-commit", "pytest (>=7.3.1)", "pytest-cov", "pytest-instafail", "pytest-mock", "pytest-sugar", "sumtypes"] +test = ["black", "coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mock", "mypy", "pre-commit", "pytest (>=7.3.1)", "pytest-cov", "pytest-instafail", "pytest-mock", "pytest-sugar"] [[package]] name = "gprof2dot" @@ -2270,13 +2307,13 @@ typing-extensions = ">=4.3.0" [[package]] name = "nautobot" -version = "2.1.4" +version = "2.1.5" description = "Source of truth and network automation platform." 
optional = false python-versions = ">=3.8,<3.12" files = [ - {file = "nautobot-2.1.4-py3-none-any.whl", hash = "sha256:b1311cb8bda428ee1b5b7074ce75ef99aaffd31a29207a69339fa92cea2de729"}, - {file = "nautobot-2.1.4.tar.gz", hash = "sha256:50e64ba399485631fc694c489b3b47a3c300f7914f8856cff2819d076474245b"}, + {file = "nautobot-2.1.5-py3-none-any.whl", hash = "sha256:62ab0115349f7c75c675f93324644e519d5fde5dd0c7ed3baa40a1c20d45118a"}, + {file = "nautobot-2.1.5.tar.gz", hash = "sha256:4c36d03592210c17bc899dd9cfcb05c26ad720727e6869d928540611aefa2f55"}, ] [package.dependencies] @@ -2352,20 +2389,20 @@ nautobot = ["nautobot (>=2.0.0,<3.0.0)"] [[package]] name = "nautobot-ssot" -version = "2.2.0" +version = "2.3.0" description = "Nautobot Single Source of Truth" optional = false python-versions = ">=3.8,<3.12" files = [ - {file = "nautobot_ssot-2.2.0-py3-none-any.whl", hash = "sha256:0abd1139f45c438a9298d341a2a1792cbfbc4381628f16cc9eedf03beb1c063d"}, - {file = "nautobot_ssot-2.2.0.tar.gz", hash = "sha256:2ca0871737d586bcc660e6857e4f446f1d1a7859c13a395570b59288ae4be2c4"}, + {file = "nautobot_ssot-2.3.0-py3-none-any.whl", hash = "sha256:7db10e0b80515fda56b3ec1b2f8f3bc51c674bb59b07033433d3e32ed531b0c5"}, + {file = "nautobot_ssot-2.3.0.tar.gz", hash = "sha256:63841e4db86c1f8be2e829a489dc2884be5f36082595f823f1776131a01bd4f0"}, ] [package.dependencies] diffsync = ">=1.6.0,<2.0.0" drf-spectacular = "0.26.3" Markdown = "!=3.3.5" -nautobot = ">=2.0.0,<3.0.0" +nautobot = ">=2.1.0,<3.0.0" packaging = ">=21.3,<24" prometheus-client = ">=0.17.1,<0.18.0" @@ -2567,13 +2604,13 @@ nornir = ">=3,<4" [[package]] name = "ntc-templates" -version = "4.2.0" +version = "4.3.0" description = "TextFSM Templates for Network Devices, and Python wrapper for TextFSM's CliTable." 
optional = false python-versions = ">=3.8,<4.0" files = [ - {file = "ntc_templates-4.2.0-py3-none-any.whl", hash = "sha256:f41471c1375c1a86bb5958358339efe9e95d908ea33866125adafe36fbfe11dd"}, - {file = "ntc_templates-4.2.0.tar.gz", hash = "sha256:a06c0e786aa3aea429d345ea67f59cb6da43557c31aa65914969d0cd6b0c0dde"}, + {file = "ntc_templates-4.3.0-py3-none-any.whl", hash = "sha256:f9b4805dfd9d1516a29ae9f505409c17c7f14c958d47f1c1f57c9486af6164db"}, + {file = "ntc_templates-4.3.0.tar.gz", hash = "sha256:b6902389e86b868d76b64ea55c8225a0aa7aafe910b3a02b2a33b7b18fb27ef1"}, ] [package.dependencies] @@ -3038,23 +3075,6 @@ typing-extensions = ">=4.2.0" dotenv = ["python-dotenv (>=0.10.4)"] email = ["email-validator (>=1.0.3)"] -[[package]] -name = "pydocstyle" -version = "6.3.0" -description = "Python docstring style checker" -optional = false -python-versions = ">=3.6" -files = [ - {file = "pydocstyle-6.3.0-py3-none-any.whl", hash = "sha256:118762d452a49d6b05e194ef344a55822987a462831ade91ec5c06fd2169d019"}, - {file = "pydocstyle-6.3.0.tar.gz", hash = "sha256:7ce43f0c0ac87b07494eb9c0b462c0b73e6ff276807f204d6b53edc72b7e44e1"}, -] - -[package.dependencies] -snowballstemmer = ">=2.2.0" - -[package.extras] -toml = ["tomli (>=1.2.3)"] - [[package]] name = "pyeapi" version = "1.0.2" @@ -3864,28 +3884,28 @@ files = [ [[package]] name = "ruff" -version = "0.1.15" +version = "0.2.2" description = "An extremely fast Python linter and code formatter, written in Rust." 
optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.1.15-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:5fe8d54df166ecc24106db7dd6a68d44852d14eb0729ea4672bb4d96c320b7df"}, - {file = "ruff-0.1.15-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:6f0bfbb53c4b4de117ac4d6ddfd33aa5fc31beeaa21d23c45c6dd249faf9126f"}, - {file = "ruff-0.1.15-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e0d432aec35bfc0d800d4f70eba26e23a352386be3a6cf157083d18f6f5881c8"}, - {file = "ruff-0.1.15-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9405fa9ac0e97f35aaddf185a1be194a589424b8713e3b97b762336ec79ff807"}, - {file = "ruff-0.1.15-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c66ec24fe36841636e814b8f90f572a8c0cb0e54d8b5c2d0e300d28a0d7bffec"}, - {file = "ruff-0.1.15-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:6f8ad828f01e8dd32cc58bc28375150171d198491fc901f6f98d2a39ba8e3ff5"}, - {file = "ruff-0.1.15-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86811954eec63e9ea162af0ffa9f8d09088bab51b7438e8b6488b9401863c25e"}, - {file = "ruff-0.1.15-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fd4025ac5e87d9b80e1f300207eb2fd099ff8200fa2320d7dc066a3f4622dc6b"}, - {file = "ruff-0.1.15-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b17b93c02cdb6aeb696effecea1095ac93f3884a49a554a9afa76bb125c114c1"}, - {file = "ruff-0.1.15-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:ddb87643be40f034e97e97f5bc2ef7ce39de20e34608f3f829db727a93fb82c5"}, - {file = "ruff-0.1.15-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:abf4822129ed3a5ce54383d5f0e964e7fef74a41e48eb1dfad404151efc130a2"}, - {file = "ruff-0.1.15-py3-none-musllinux_1_2_i686.whl", hash = "sha256:6c629cf64bacfd136c07c78ac10a54578ec9d1bd2a9d395efbee0935868bf852"}, - {file = 
"ruff-0.1.15-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:1bab866aafb53da39c2cadfb8e1c4550ac5340bb40300083eb8967ba25481447"}, - {file = "ruff-0.1.15-py3-none-win32.whl", hash = "sha256:2417e1cb6e2068389b07e6fa74c306b2810fe3ee3476d5b8a96616633f40d14f"}, - {file = "ruff-0.1.15-py3-none-win_amd64.whl", hash = "sha256:3837ac73d869efc4182d9036b1405ef4c73d9b1f88da2413875e34e0d6919587"}, - {file = "ruff-0.1.15-py3-none-win_arm64.whl", hash = "sha256:9a933dfb1c14ec7a33cceb1e49ec4a16b51ce3c20fd42663198746efc0427360"}, - {file = "ruff-0.1.15.tar.gz", hash = "sha256:f6dfa8c1b21c913c326919056c390966648b680966febcb796cc9d1aaab8564e"}, + {file = "ruff-0.2.2-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:0a9efb032855ffb3c21f6405751d5e147b0c6b631e3ca3f6b20f917572b97eb6"}, + {file = "ruff-0.2.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:d450b7fbff85913f866a5384d8912710936e2b96da74541c82c1b458472ddb39"}, + {file = "ruff-0.2.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ecd46e3106850a5c26aee114e562c329f9a1fbe9e4821b008c4404f64ff9ce73"}, + {file = "ruff-0.2.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5e22676a5b875bd72acd3d11d5fa9075d3a5f53b877fe7b4793e4673499318ba"}, + {file = "ruff-0.2.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1695700d1e25a99d28f7a1636d85bafcc5030bba9d0578c0781ba1790dbcf51c"}, + {file = "ruff-0.2.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:b0c232af3d0bd8f521806223723456ffebf8e323bd1e4e82b0befb20ba18388e"}, + {file = "ruff-0.2.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f63d96494eeec2fc70d909393bcd76c69f35334cdbd9e20d089fb3f0640216ca"}, + {file = "ruff-0.2.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6a61ea0ff048e06de273b2e45bd72629f470f5da8f71daf09fe481278b175001"}, + {file = 
"ruff-0.2.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e1439c8f407e4f356470e54cdecdca1bd5439a0673792dbe34a2b0a551a2fe3"}, + {file = "ruff-0.2.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:940de32dc8853eba0f67f7198b3e79bc6ba95c2edbfdfac2144c8235114d6726"}, + {file = "ruff-0.2.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:0c126da55c38dd917621552ab430213bdb3273bb10ddb67bc4b761989210eb6e"}, + {file = "ruff-0.2.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:3b65494f7e4bed2e74110dac1f0d17dc8e1f42faaa784e7c58a98e335ec83d7e"}, + {file = "ruff-0.2.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:1ec49be4fe6ddac0503833f3ed8930528e26d1e60ad35c2446da372d16651ce9"}, + {file = "ruff-0.2.2-py3-none-win32.whl", hash = "sha256:d920499b576f6c68295bc04e7b17b6544d9d05f196bb3aac4358792ef6f34325"}, + {file = "ruff-0.2.2-py3-none-win_amd64.whl", hash = "sha256:cc9a91ae137d687f43a44c900e5d95e9617cb37d4c989e462980ba27039d239d"}, + {file = "ruff-0.2.2-py3-none-win_arm64.whl", hash = "sha256:c9d15fc41e6054bfc7200478720570078f0b41c9ae4f010bcc16bd6f4d1aacdd"}, + {file = "ruff-0.2.2.tar.gz", hash = "sha256:e62ed7f36b3068a30ba39193a14274cd706bc486fad521276458022f7bccb31d"}, ] [[package]] @@ -3976,17 +3996,6 @@ files = [ {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"}, ] -[[package]] -name = "snowballstemmer" -version = "2.2.0" -description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." 
-optional = false -python-versions = "*" -files = [ - {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, - {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, -] - [[package]] name = "social-auth-app-django" version = "5.2.0" @@ -4444,13 +4453,13 @@ files = [ [[package]] name = "yamllint" -version = "1.34.0" +version = "1.35.1" description = "A linter for YAML files." optional = false python-versions = ">=3.8" files = [ - {file = "yamllint-1.34.0-py3-none-any.whl", hash = "sha256:33b813f6ff2ffad2e57a288281098392b85f7463ce1f3d5cd45aa848b916a806"}, - {file = "yamllint-1.34.0.tar.gz", hash = "sha256:7f0a6a41e8aab3904878da4ae34b6248b6bc74634e0d3a90f0fb2d7e723a3d4f"}, + {file = "yamllint-1.35.1-py3-none-any.whl", hash = "sha256:2e16e504bb129ff515b37823b472750b36b6de07963bd74b307341ef5ad8bdc3"}, + {file = "yamllint-1.35.1.tar.gz", hash = "sha256:7a003809f88324fd2c877734f2d575ee7881dd9043360657cc8049c809eba6cd"}, ] [package.dependencies] @@ -4495,4 +4504,4 @@ all = [] [metadata] lock-version = "2.0" python-versions = ">=3.8,<3.12" -content-hash = "ff0b9f5780232b35c98dca2dcfb617b37c2c4f9faca60df0d21ef7400144a830" +content-hash = "1a27531e336ada6a38d6d600445f835e0821f635efe7b73b5974c4665df5bb03" diff --git a/tasks.py b/tasks.py index f363df4c..6bd18e00 100644 --- a/tasks.py +++ b/tasks.py @@ -500,9 +500,11 @@ def import_db(context, db_name="", input_file="dump.sql"): '--execute="', f"DROP DATABASE IF EXISTS {db_name};", f"CREATE DATABASE {db_name};", - "" - if db_name == "$MYSQL_DATABASE" - else f"GRANT ALL PRIVILEGES ON {db_name}.* TO $MYSQL_USER; FLUSH PRIVILEGES;", + ( + "" + if db_name == "$MYSQL_DATABASE" + else f"GRANT ALL PRIVILEGES ON {db_name}.* TO $MYSQL_USER; FLUSH PRIVILEGES;" + ), '"', "&&", "mysql", From e60eadd31e2d3666e17f926ba2163c18f1e5d9cd Mon Sep 17 00:00:00 2001 From: David Cates Date: Wed, 21 Feb 
2024 14:42:35 -0700 Subject: [PATCH 098/225] black --- nautobot_device_onboarding/utils/formatter.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/nautobot_device_onboarding/utils/formatter.py b/nautobot_device_onboarding/utils/formatter.py index 5e19f193..f5c5d91f 100755 --- a/nautobot_device_onboarding/utils/formatter.py +++ b/nautobot_device_onboarding/utils/formatter.py @@ -22,10 +22,10 @@ def get_django_env(): """ # Use a custom Jinja2 environment instead of Django's to avoid HTML escaping j2_env = { - "undefined": "jinja2.StrictUndefined", - "trim_blocks": True, - "lstrip_blocks": False, - } + "undefined": "jinja2.StrictUndefined", + "trim_blocks": True, + "lstrip_blocks": False, + } if isinstance(j2_env["undefined"], str): j2_env["undefined"] = import_string(j2_env["undefined"]) jinja_env = SandboxedEnvironment(**j2_env) @@ -109,7 +109,9 @@ def extract_show_data(host, multi_result, command_getter_type): j2_rendered_jpath = render_jinja_template(obj=host.name, template=command_info["jpath"]) extracted_value = extract_data_from_json(multi_result[0].result, j2_rendered_jpath) if command_info.get("post_processor"): - extracted_processed = render_jinja_template(obj=extracted_value, template=command_info["post_processor"]) + extracted_processed = render_jinja_template( + obj=extracted_value, template=command_info["post_processor"] + ) else: extracted_processed = extracted_value if isinstance(extracted_value, list) and len(extracted_value) == 1: From 44024cad34543818f26e69f87fbacb8256ac3154 Mon Sep 17 00:00:00 2001 From: David Cates Date: Wed, 21 Feb 2024 14:45:08 -0700 Subject: [PATCH 099/225] fix juniper_junos.yml --- nautobot_device_onboarding/command_mappers/juniper_junos.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nautobot_device_onboarding/command_mappers/juniper_junos.yml b/nautobot_device_onboarding/command_mappers/juniper_junos.yml index a98fc7ed..7524e8b0 100755 --- 
a/nautobot_device_onboarding/command_mappers/juniper_junos.yml +++ b/nautobot_device_onboarding/command_mappers/juniper_junos.yml @@ -3,10 +3,10 @@ device_onboarding: use_textfsm: false hostname: command: "show version | display json" - jpath: '"software-information"[0]."host-name"[0].data' + jpath: "'software-information'[0]."host-name"[0].data" serial: command: "show version | display json" - jpath: "'chassis-inventory"[0].'chassis'[0].'serial-number'[0].data" + jpath: "'chassis-inventory'[0].'chassis'[0].'serial-number'[0].data" device_type: command: "show version | display json" jpath: "'software-information'[0].'product-model'[0].data" From d56ff7898d6264654535ecd943ed77eb68a8e5aa Mon Sep 17 00:00:00 2001 From: David Cates Date: Wed, 21 Feb 2024 14:48:12 -0700 Subject: [PATCH 100/225] juniper_junos.yml formatting --- nautobot_device_onboarding/command_mappers/juniper_junos.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nautobot_device_onboarding/command_mappers/juniper_junos.yml b/nautobot_device_onboarding/command_mappers/juniper_junos.yml index 7524e8b0..53f3be46 100755 --- a/nautobot_device_onboarding/command_mappers/juniper_junos.yml +++ b/nautobot_device_onboarding/command_mappers/juniper_junos.yml @@ -3,7 +3,7 @@ device_onboarding: use_textfsm: false hostname: command: "show version | display json" - jpath: "'software-information'[0]."host-name"[0].data" + jpath: "'software-information'[0].'host-name'[0].data" serial: command: "show version | display json" jpath: "'chassis-inventory'[0].'chassis'[0].'serial-number'[0].data" From 80688d919bf6622048db9fb101d9434a886e8517 Mon Sep 17 00:00:00 2001 From: David Cates Date: Wed, 21 Feb 2024 18:01:36 -0700 Subject: [PATCH 101/225] update ssot network adapter, models and mock data --- .../adapters/network_importer_adapters.py | 188 +++++------ .../diffsync/mock_data.py | 306 +++++++++--------- .../models/network_importer_models.py | 2 - nautobot_device_onboarding/jobs.py | 18 ++ 4 files 
changed, 268 insertions(+), 246 deletions(-) diff --git a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py index 1170e124..b14f3d16 100644 --- a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py @@ -63,9 +63,10 @@ def load_ip_addresses(self): """ ip_address_hosts = set() for _, device_data in self.job.command_getter_result.items(): - for _, interface_data in device_data["interfaces"].items(): - for ip_address in interface_data["ip_addresses"]: - ip_address_hosts.add(ip_address["host"]) + for interface in device_data["interfaces"]: + for _, interface_data in interface.items(): + for ip_address in interface_data["ip_addresses"]: + ip_address_hosts.add(ip_address["host"]) for ip_address in IPAddress.objects.filter( host__in=ip_address_hosts, @@ -268,11 +269,12 @@ def load_devices(self): self.add(network_device) if self.job.debug: self.job.logger.debug(f"Device {network_device} loaded.") - for interface_name, interface_data in device_data["interfaces"].items(): - network_interface = self.load_interface(hostname, interface_name, interface_data) - network_device.add_child(network_interface) - if self.job.debug: - self.job.logger.debug(f"Interface {network_interface} loaded.") + for interface in device_data["interfaces"]: + for interface_name, interface_data in interface.items(): + network_interface = self.load_interface(hostname, interface_name, interface_data) + network_device.add_child(network_interface) + if self.job.debug: + self.job.logger.debug(f"Interface {network_interface} loaded.") def load_interface(self, hostname, interface_name, interface_data): """Load an interface into the DiffSync store.""" @@ -280,14 +282,13 @@ def load_interface(self, hostname, interface_name, interface_data): diffsync=self, name=interface_name, device__name=hostname, - 
status__name=interface_data["status"], + status__name=self.job.interface_status.name, type=interface_data["type"], mac_address=self._process_mac_address(interface_data["mac_address"]), mtu=interface_data["mtu"], description=interface_data["description"], enabled=interface_data["enabled"], mode=interface_data["802.1Q_mode"], - mgmt_only=interface_data["mgmt_only"], untagged_vlan__name=interface_data["untagged_vlan"]["name"] if interface_data["untagged_vlan"] else None, ) self.add(network_interface) @@ -298,27 +299,28 @@ def load_interface(self, hostname, interface_name, interface_data): def load_ip_addresses(self): """Load IP addresses into the DiffSync store.""" for hostname, device_data in self.job.command_getter_result.items(): - for interface_name, interface_data in device_data["interfaces"].items(): - for ip_address in interface_data["ip_addresses"]: - if self.job.debug: - self.job.logger.debug(f"Loading {ip_address} from {interface_name} on {hostname}") - network_ip_address = self.ip_address( - diffsync=self, - host=ip_address["host"], - mask_length=ip_address["mask_length"], - type="host", - ip_version=4, - status__name=self.job.ip_address_status.name, - ) - try: - self.add(network_ip_address) + for interface in device_data["interfaces"]: + for interface_name, interface_data in interface.items(): + for ip_address in interface_data["ip_addresses"]: if self.job.debug: - self.job.logger.debug(f"{network_ip_address} loaded.") - except diffsync.exceptions.ObjectAlreadyExists: - self.job.logger.warning( - f"{network_ip_address} is already loaded to the " - "DiffSync store. This is a duplicate IP Address." 
+ self.job.logger.debug(f"Loading {ip_address} from {interface_name} on {hostname}") + network_ip_address = self.ip_address( + diffsync=self, + host=ip_address["host"], + mask_length=ip_address["mask_length"], + type="host", + ip_version=4, + status__name=self.job.ip_address_status.name, ) + try: + self.add(network_ip_address) + if self.job.debug: + self.job.logger.debug(f"{network_ip_address} loaded.") + except diffsync.exceptions.ObjectAlreadyExists: + self.job.logger.warning( + f"{network_ip_address} is already loaded to the " + "DiffSync store. This is a duplicate IP Address." + ) def load_vlans(self): """Load vlans into the Diffsync store.""" @@ -327,83 +329,87 @@ def load_vlans(self): location_names[device.name] = device.location.name for hostname, device_data in self.job.command_getter_result.items(): - for _, interface_data in device_data["interfaces"].items(): - # add tagged vlans - for tagged_vlan in interface_data["tagged_vlans"]: - network_vlan = self.vlan( - diffsync=self, - name=tagged_vlan["name"], - vid=tagged_vlan["id"], - location__name=location_names.get(hostname, ""), - ) - try: - self.add(network_vlan) - if self.job.debug: - self.job.logger.debug(f"Tagged Vlan {network_vlan} loaded.") - except diffsync.exceptions.ObjectAlreadyExists: - pass - # check for untagged vlan and add if necessary - if interface_data["untagged_vlan"]: - network_vlan = self.vlan( - diffsync=self, - name=interface_data["untagged_vlan"]["name"], - vid=interface_data["untagged_vlan"]["id"], - location__name=location_names.get(hostname, ""), - ) - try: - self.add(network_vlan) - if self.job.debug: - self.job.logger.debug(f"Untagged Vlan {network_vlan} loaded.") - except diffsync.exceptions.ObjectAlreadyExists: - pass + for interface in device_data["interfaces"]: + for _, interface_data in interface.items(): + # add tagged vlans + for tagged_vlan in interface_data["tagged_vlans"]: + network_vlan = self.vlan( + diffsync=self, + name=tagged_vlan["name"], + 
vid=tagged_vlan["id"], + location__name=location_names.get(hostname, ""), + ) + try: + self.add(network_vlan) + if self.job.debug: + self.job.logger.debug(f"Tagged Vlan {network_vlan} loaded.") + except diffsync.exceptions.ObjectAlreadyExists: + pass + # check for untagged vlan and add if necessary + if interface_data["untagged_vlan"]: + network_vlan = self.vlan( + diffsync=self, + name=interface_data["untagged_vlan"]["name"], + vid=interface_data["untagged_vlan"]["id"], + location__name=location_names.get(hostname, ""), + ) + try: + self.add(network_vlan) + if self.job.debug: + self.job.logger.debug(f"Untagged Vlan {network_vlan} loaded.") + except diffsync.exceptions.ObjectAlreadyExists: + pass def load_ip_address_to_interfaces(self): """Load ip address interface assignments into the Diffsync store.""" for hostname, device_data in self.job.command_getter_result.items(): - for interface_name, interface_data in device_data["interfaces"].items(): - for ip_address in interface_data["ip_addresses"]: - network_ip_address_to_interface = self.ipaddress_to_interface( - diffsync=self, - interface__device__name=hostname, - interface__name=interface_name, - ip_address__host=ip_address["host"], - ip_address__mask_length=ip_address["mask_length"], - ) - self.add(network_ip_address_to_interface) - if self.job.debug: - self.job.logger.debug(f"IP Address to interface {network_ip_address_to_interface} loaded.") + for interface in device_data["interfaces"]: + for interface_name, interface_data in interface.items(): + for ip_address in interface_data["ip_addresses"]: + network_ip_address_to_interface = self.ipaddress_to_interface( + diffsync=self, + interface__device__name=hostname, + interface__name=interface_name, + ip_address__host=ip_address["host"], + ip_address__mask_length=ip_address["mask_length"], + ) + self.add(network_ip_address_to_interface) + if self.job.debug: + self.job.logger.debug(f"IP Address to interface {network_ip_address_to_interface} loaded.") def 
load_tagged_vlans_to_interface(self): """Load tagged vlan to interface assignments into the Diffsync store.""" for hostname, device_data in self.job.command_getter_result.items(): - for interface_name, interface_data in device_data["interfaces"].items(): - network_tagged_vlans_to_interface = self.tagged_vlans_to_interface( - diffsync=self, - device__name=hostname, - name=interface_name, - tagged_vlans=interface_data["tagged_vlans"], - ) - self.add(network_tagged_vlans_to_interface) - if self.job.debug: - self.job.logger.debug(f"Tagged Vlan to interface {network_tagged_vlans_to_interface} loaded.") + for interface in device_data["interfaces"]: + for interface_name, interface_data in interface.items(): + network_tagged_vlans_to_interface = self.tagged_vlans_to_interface( + diffsync=self, + device__name=hostname, + name=interface_name, + tagged_vlans=interface_data["tagged_vlans"], + ) + self.add(network_tagged_vlans_to_interface) + if self.job.debug: + self.job.logger.debug(f"Tagged Vlan to interface {network_tagged_vlans_to_interface} loaded.") def load_lag_to_interface(self): """Load lag interface assignments into the Diffsync store.""" for hostname, device_data in self.job.command_getter_result.items(): - for interface_name, interface_data in device_data["interfaces"].items(): - network_lag_to_interface = self.lag_to_interface( - diffsync=self, - device__name=hostname, - name=interface_name, - lag__interface__name=interface_data["lag"] if interface_data["lag"] else "", - ) - self.add(network_lag_to_interface) - if self.job.debug: - self.job.logger.debug(f"Lag to interface {network_lag_to_interface} loaded.") + for interface in device_data["interfaces"]: + for interface_name, interface_data in interface.items(): + network_lag_to_interface = self.lag_to_interface( + diffsync=self, + device__name=hostname, + name=interface_name, + lag__interface__name=interface_data["lag"] if interface_data["lag"] else "", + ) + self.add(network_lag_to_interface) + if self.job.debug: 
+ self.job.logger.debug(f"Lag to interface {network_lag_to_interface} loaded.") def load(self): """Load network data.""" - self.execute_command_getter() + # self.execute_command_getter() self.load_ip_addresses() if self.job.sync_vlans: self.load_vlans() diff --git a/nautobot_device_onboarding/diffsync/mock_data.py b/nautobot_device_onboarding/diffsync/mock_data.py index 5b00c618..57d08e61 100644 --- a/nautobot_device_onboarding/diffsync/mock_data.py +++ b/nautobot_device_onboarding/diffsync/mock_data.py @@ -5,170 +5,170 @@ network_importer_mock_data = { "demo-cisco-xe1": { "serial": "9ABUXU581111", - "interfaces": { - "GigabitEthernet1": { - "mgmt_only": True, - "status": "Active", - "type": "100base-tx", - "ip_addresses": [ - {"host": "10.1.1.8", "mask_length": 16}, - ], - "mac_address": "d8b1.905c.7130", - "mtu": "1500", - "description": "", - "enabled": True, - "802.1Q_mode": "tagged", - "lag": "", - "untagged_vlan": {"name": "vlan60", "id": "60"}, - "tagged_vlans": [{"name": "vlan40", "id": "40"}], + "interfaces": [ + { + "GigabitEthernet1": { + "type": "100base-tx", + "ip_addresses": [ + {"host": "10.1.1.8", "mask_length": 16}, + ], + "mac_address": "d8b1.905c.7130", + "mtu": "1500", + "description": "", + "enabled": True, + "802.1Q_mode": "tagged", + "lag": "", + "untagged_vlan": {"name": "vlan60", "id": "60"}, + "tagged_vlans": [{"name": "vlan40", "id": "40"}], + } }, - "GigabitEthernet2": { - "mgmt_only": False, - "status": "Active", - "type": "100base-tx", - "ip_addresses": [ - {"host": "10.1.1.9", "mask_length": 24}, - ], - "mac_address": "d8b1.905c.7131", - "mtu": "1500", - "description": "uplink Po1", - "enabled": True, - "802.1Q_mode": "", - "lag": "Po2", - "untagged_vlan": "", - "tagged_vlans": [], + { + "GigabitEthernet2": { + "type": "100base-tx", + "ip_addresses": [ + {"host": "10.1.1.9", "mask_length": 24}, + ], + "mac_address": "d8b1.905c.7131", + "mtu": "1500", + "description": "uplink Po1", + "enabled": True, + "802.1Q_mode": "", + "lag": 
"Po2", + "untagged_vlan": "", + "tagged_vlans": [], + } }, - "GigabitEthernet3": { - "mgmt_only": False, - "status": "Active", - "type": "100base-tx", - "ip_addresses": [ - {"host": "10.1.1.10", "mask_length": 24}, - {"host": "10.1.1.11", "mask_length": 22}, - ], - "mac_address": "d8b1.905c.7132", - "mtu": "1500", - "description": "", - "enabled": True, - "802.1Q_mode": "tagged", - "lag": "Po1", - "untagged_vlan": "", - "tagged_vlans": [{"name": "vlan40", "id": "40"}, {"name": "vlan50", "id": "50"}], + { + "GigabitEthernet3": { + "type": "100base-tx", + "ip_addresses": [ + {"host": "10.1.1.10", "mask_length": 24}, + {"host": "10.1.1.11", "mask_length": 22}, + ], + "mac_address": "d8b1.905c.7132", + "mtu": "1500", + "description": "", + "enabled": True, + "802.1Q_mode": "tagged", + "lag": "Po1", + "untagged_vlan": "", + "tagged_vlans": [{"name": "vlan40", "id": "40"}, {"name": "vlan50", "id": "50"}], + } }, - "GigabitEthernet4": { - "mgmt_only": False, - "status": "Active", - "type": "100base-tx", - "ip_addresses": [ - {"host": "10.1.1.12", "mask_length": 20}, - ], - "mac_address": "d8b1.905c.7133", - "mtu": "1500", - "description": "", - "enabled": True, - "802.1Q_mode": "", - "lag": "", - "untagged_vlan": "", - "tagged_vlans": [], + { + "GigabitEthernet4": { + "type": "100base-tx", + "ip_addresses": [ + {"host": "10.1.1.12", "mask_length": 20}, + ], + "mac_address": "d8b1.905c.7133", + "mtu": "1500", + "description": "", + "enabled": True, + "802.1Q_mode": "", + "lag": "", + "untagged_vlan": "", + "tagged_vlans": [], + } }, - "Po1": { - "mgmt_only": False, - "status": "Active", - "type": "lag", - "ip_addresses": [], - "mac_address": "d8b1.905c.7134", - "mtu": "1500", - "description": "", - "enabled": True, - "802.1Q_mode": "", - "lag": "", - "untagged_vlan": "", - "tagged_vlans": [], + { + "Po1": { + "type": "lag", + "ip_addresses": [], + "mac_address": "d8b1.905c.7134", + "mtu": "1500", + "description": "", + "enabled": True, + "802.1Q_mode": "", + "lag": "", + 
"untagged_vlan": "", + "tagged_vlans": [], + } }, - "Po2": { - "mgmt_only": False, - "status": "Active", - "type": "lag", - "ip_addresses": [], - "mac_address": "d8b1.905c.7135", - "mtu": "1500", - "description": "", - "enabled": True, - "802.1Q_mode": "", - "lag": "", - "untagged_vlan": "", - "tagged_vlans": [], - }, - }, + { + "Po2": { + "type": "lag", + "ip_addresses": [], + "mac_address": "d8b1.905c.7135", + "mtu": "1500", + "description": "", + "enabled": True, + "802.1Q_mode": "", + "lag": "", + "untagged_vlan": "", + "tagged_vlans": [], + } + } + ], }, "demo-cisco-xe2": { "serial": "9ABUXU5882222", - "interfaces": { - "GigabitEthernet1": { - "mgmt_only": True, - "status": "Active", - "type": "100base-tx", - "ip_addresses": [ - {"host": "10.1.2.8", "mask_length": 24}, - ], - "mac_address": "d8b1.905c.5170", - "mtu": "1500", - "description": "", - "enabled": True, - "802.1Q_mode": "tagged", - "lag": "", - "untagged_vlan": {"name": "vlan60", "id": "60"}, - "tagged_vlans": [{"name": "vlan40", "id": "40"}], - }, - "GigabitEthernet2": { - "mgmt_only": False, - "status": "Active", - "type": "100base-tx", - "ip_addresses": [ - {"host": "10.1.2.9", "mask_length": 24}, - ], - "mac_address": "d8b1.905c.5171", - "mtu": "1500", - "description": "uplink Po1", - "enabled": True, - "802.1Q_mode": "", - "lag": "Po1", - "untagged_vlan": "", - "tagged_vlans": [], + "interfaces": [ + { + "GigabitEthernet1": { + "type": "100base-tx", + "ip_addresses": [ + {"host": "10.1.2.8", "mask_length": 24}, + ], + "mac_address": "d8b1.905c.5170", + "mtu": "1500", + "description": "", + "enabled": True, + "802.1Q_mode": "tagged", + "lag": "", + "untagged_vlan": {"name": "vlan60", "id": "60"}, + "tagged_vlans": [{"name": "vlan40", "id": "40"}], + } }, - "GigabitEthernet3": { - "mgmt_only": False, - "status": "Active", - "type": "100base-tx", - "ip_addresses": [ - {"host": "10.1.2.10", "mask_length": 24}, - {"host": "10.1.2.11", "mask_length": 22}, - ], - "mac_address": "d8b1.905c.5172", - 
"mtu": "1500", - "description": "", - "enabled": True, - "802.1Q_mode": "tagged", - "lag": "Po1", - "untagged_vlan": "", - "tagged_vlans": [{"name": "vlan40", "id": "40"}, {"name": "vlan50", "id": "50"}], + { + "GigabitEthernet2": { + "type": "100base-tx", + "ip_addresses": [ + {"host": "10.1.2.9", "mask_length": 24}, + ], + "mac_address": "d8b1.905c.5171", + "mtu": "1500", + "description": "uplink Po1", + "enabled": True, + "802.1Q_mode": "", + "lag": "Po1", + "untagged_vlan": "", + "tagged_vlans": [], + } }, - "Po1": { - "mgmt_only": False, - "status": "Active", - "type": "lag", - "ip_addresses": [], - "mac_address": "d8b1.905c.5173", - "mtu": "1500", - "description": "", - "enabled": True, - "802.1Q_mode": "", - "lag": "", - "untagged_vlan": "", - "tagged_vlans": [], + { + "GigabitEthernet3": { + "type": "100base-tx", + "ip_addresses": [ + {"host": "10.1.2.10", "mask_length": 24}, + {"host": "10.1.2.11", "mask_length": 22}, + ], + "mac_address": "d8b1.905c.5172", + "mtu": "1500", + "description": "", + "enabled": True, + "802.1Q_mode": "tagged", + "lag": "Po1", + "untagged_vlan": "", + "tagged_vlans": [{"name": "vlan40", "id": "40"}, {"name": "vlan50", "id": "50"}], + } }, - }, - }, + { + "Po1": { + "type": "lag", + "ip_addresses": [], + "mac_address": "d8b1.905c.5173", + "mtu": "1500", + "description": "", + "enabled": True, + "802.1Q_mode": "", + "lag": "", + "untagged_vlan": "", + "tagged_vlans": [], + } + } + ] + } } device_onboarding_mock_data = { diff --git a/nautobot_device_onboarding/diffsync/models/network_importer_models.py b/nautobot_device_onboarding/diffsync/models/network_importer_models.py index b188e144..caca975f 100644 --- a/nautobot_device_onboarding/diffsync/models/network_importer_models.py +++ b/nautobot_device_onboarding/diffsync/models/network_importer_models.py @@ -104,7 +104,6 @@ class NetworkImporterInterface(FilteredNautobotModel): "mtu", # "parent_interface__name", "mode", - "mgmt_only", "untagged_vlan__name", ) @@ -118,7 +117,6 @@ 
class NetworkImporterInterface(FilteredNautobotModel): parent_interface__name: Optional[str] lag__name: Optional[str] mode: Optional[str] - mgmt_only: Optional[bool] untagged_vlan__name: Optional[str] diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py index 123f6b2d..25890f26 100755 --- a/nautobot_device_onboarding/jobs.py +++ b/nautobot_device_onboarding/jobs.py @@ -361,6 +361,15 @@ def __init__(self): """Initialize SSOTNetworkImporter.""" super().__init__() self.filtered_devices = None # Queryset of devices based on form inputs + + #################### FOR TESTING ONLY ######################################### + # from nautobot_device_onboarding.diffsync import mock_data + # from nautobot_device_onboarding.utils import diffsync_utils + # self.command_getter_result = mock_data.network_importer_mock_data + # self.devices_to_load = diffsync_utils.generate_device_querset_from_command_getter_result(mock_data.network_importer_mock_data) + ################### REMOVE WHEN NOT TESTING ################################### + + ############ RESTORE THESE LINES WHEN NOT TESTING! ############################ self.command_getter_result = None # Dict result from CommandGetter job self.devices_to_load = None # Queryset consisting of devices that responded @@ -378,6 +387,12 @@ class Meta: namespace = ObjectVar( model=Namespace, required=True, description="The namespace for all IP addresses created or updated in the sync." ) + interface_status = ObjectVar( + model=Status, + query_params={"content_types": "dcim.interface"}, + required=True, + description="Status to be applied to all synced device interfaces. This will update existing interface statuses.", + ) ip_address_status = ObjectVar( label="IP address status", model=Status, @@ -385,6 +400,7 @@ class Meta: required=True, description="Status to be applied to all synced IP addresses. 
This will update existing IP address statuses", ) + default_prefix_status = ObjectVar( model=Status, query_params={"content_types": "ipam.prefix"}, @@ -431,6 +447,7 @@ def run( memory_profiling, debug, namespace, + interface_status, ip_address_status, default_prefix_status, location, @@ -447,6 +464,7 @@ def run( self.debug = debug self.namespace = namespace self.ip_address_status = ip_address_status + self.interface_status = interface_status self.default_prefix_status = default_prefix_status self.location = location self.devices = devices From 35e26872d28de93f1064379671e5d78b110ac593 Mon Sep 17 00:00:00 2001 From: Jeff Kala Date: Thu, 22 Feb 2024 15:01:59 -0600 Subject: [PATCH 102/225] ios NI initial working --- .../command_mappers/arista_eos.yml | 33 ++++-- .../command_mappers/cisco_ios.yml | 3 +- .../diffsync/mock_data.py | 8 +- nautobot_device_onboarding/utils/formatter.py | 102 ++++++++++-------- .../utils/jinja_filters.py | 21 ++++ 5 files changed, 105 insertions(+), 62 deletions(-) create mode 100755 nautobot_device_onboarding/utils/jinja_filters.py diff --git a/nautobot_device_onboarding/command_mappers/arista_eos.yml b/nautobot_device_onboarding/command_mappers/arista_eos.yml index 739f7857..a2b850da 100755 --- a/nautobot_device_onboarding/command_mappers/arista_eos.yml +++ b/nautobot_device_onboarding/command_mappers/arista_eos.yml @@ -1,19 +1,30 @@ --- device_onboarding: - use_textfsm: true + use_textfsm: false hostname: - command: "show hostname" - jpath: "[*].hostname" + # command: "show hostname" + # jpath: "[*].hostname" + command: "show hostname | json" + jpath: "'hostname'" serial: - command: "show version" - jpath: "[*].serial_number" + # command: "show version" + # jpath: "[*].serial_number" + command: "show version | json" + jpath: "'serialNumber'" device_type: - command: "show version" - jpath: "[*].model" + # command: "show version" + # jpath: "[*].model" + command: "show version | json" + jpath: "'modelName'" mgmt_interface: - command: "show 
ip interface brief" - jpath: "[?interface=='Management1'].interface" + # command: "show ip interface brief" + # jpath: "[?interface=='Management1'].interface" + command: "show ip interface brief | json" + #jpath: "[?interface=='Management1'].interface" + jpath: "[*].[$interface$,link_status,protocol_status,hardware_type,mac_address,ip_address,prefix_length,mtu]" mask_length: + # command: "show ip interface brief" + # jpath: "[?interface=='Management1'].ip_address" command: "show ip interface brief" - jpath: "[?interface=='Management1'].ip_address" - post_processor: "{{ obj[0] | ipaddress_interface('netmask') | netmask_to_cidr }}" + jpath: "$interfaces$.*.interfaceAddress[?ipAddr.address=='{{ obj }}']" + # post_processor: "{{ obj[0] | ipaddress_interface('netmask') | netmask_to_cidr }}" diff --git a/nautobot_device_onboarding/command_mappers/cisco_ios.yml b/nautobot_device_onboarding/command_mappers/cisco_ios.yml index 2634342e..c42ce447 100755 --- a/nautobot_device_onboarding/command_mappers/cisco_ios.yml +++ b/nautobot_device_onboarding/command_mappers/cisco_ios.yml @@ -24,4 +24,5 @@ network_importer: jpath: "[*].serial[0]" interfaces: command: "show interfaces" - jpath: "[*].[$interface$,link_status,protocol_status,hardware_type,mac_address,ip_address,prefix_length,mtu]" + jpath: "[*].[$interface$,hardware_type,ip_address,prefix_length,mac_address,mtu,description,link_status,vlan_id,vlan_id_inner,vlan_id_outer]" + post_processor: "{{ obj | fix_interfaces }}" diff --git a/nautobot_device_onboarding/diffsync/mock_data.py b/nautobot_device_onboarding/diffsync/mock_data.py index 57d08e61..ffaa19d3 100644 --- a/nautobot_device_onboarding/diffsync/mock_data.py +++ b/nautobot_device_onboarding/diffsync/mock_data.py @@ -98,7 +98,7 @@ "untagged_vlan": "", "tagged_vlans": [], } - } + }, ], }, "demo-cisco-xe2": { @@ -166,9 +166,9 @@ "untagged_vlan": "", "tagged_vlans": [], } - } - ] - } + }, + ], + }, } device_onboarding_mock_data = { diff --git 
a/nautobot_device_onboarding/utils/formatter.py b/nautobot_device_onboarding/utils/formatter.py index f5c5d91f..9baeb4e1 100755 --- a/nautobot_device_onboarding/utils/formatter.py +++ b/nautobot_device_onboarding/utils/formatter.py @@ -6,10 +6,13 @@ from django.template import engines from django.utils.module_loading import import_string from jdiff import extract_data_from_json -from jinja2 import exceptions as jinja_errors + +# from jinja2 import exceptions as jinja_errors from jinja2.sandbox import SandboxedEnvironment from nautobot.core.utils.data import render_jinja2 -from nautobot_device_onboarding.exceptions import OnboardException + +# from nautobot_device_onboarding.exceptions import OnboardException +from nautobot_device_onboarding.utils.jinja_filters import fix_interfaces DATA_DIR = os.path.abspath(os.path.join(os.path.dirname(os.path.dirname(__file__)), "command_mappers")) @@ -30,48 +33,49 @@ def get_django_env(): j2_env["undefined"] = import_string(j2_env["undefined"]) jinja_env = SandboxedEnvironment(**j2_env) jinja_env.filters = engines["jinja"].env.filters + jinja_env.filters["fix_interfaces"] = fix_interfaces return jinja_env -def render_jinja_template(obj, template): - """ - Helper function to render Jinja templates. - - Args: - obj (Device): The Device object from Nautobot. - template (str): A Jinja2 template to be rendered. - - Returns: - str: The ``template`` rendered. - - Raises: - NornirNautobotException: When there is an error rendering the ``template``. 
- """ - try: - return render_jinja2(template_code=template, context={"obj": obj}) - except jinja_errors.UndefinedError as error: - error_msg = ( - "`E3019:` Jinja encountered and UndefinedError`, check the template for missing variable definitions.\n" - f"Template:\n{template}\n" - f"Original Error: {error}" - ) - raise OnboardException(error_msg) from error - - except jinja_errors.TemplateSyntaxError as error: # Also catches subclass of TemplateAssertionError - error_msg = ( - f"`E3020:` Jinja encountered a SyntaxError at line number {error.lineno}," - f"check the template for invalid Jinja syntax.\nTemplate:\n{template}\n" - f"Original Error: {error}" - ) - raise OnboardException(error_msg) from error - # Intentionally not catching TemplateNotFound errors since template is passes as a string and not a filename - except jinja_errors.TemplateError as error: # Catches all remaining Jinja errors - error_msg = ( - "`E3021:` Jinja encountered an unexpected TemplateError; check the template for correctness\n" - f"Template:\n{template}\n" - f"Original Error: {error}" - ) - raise OnboardException(error_msg) from error +# def render_jinja_template(obj, template): +# """ +# Helper function to render Jinja templates. + +# Args: +# obj (Device): The Device object from Nautobot. +# template (str): A Jinja2 template to be rendered. + +# Returns: +# str: The ``template`` rendered. + +# Raises: +# NornirNautobotException: When there is an error rendering the ``template``. 
+# """ +# try: +# return render_jinja2(template_code=template, context={"obj": obj}) +# except jinja_errors.UndefinedError as error: +# error_msg = ( +# "`E3019:` Jinja encountered and UndefinedError`, check the template for missing variable definitions.\n" +# f"Template:\n{template}\n" +# f"Original Error: {error}" +# ) +# raise OnboardException(error_msg) from error + +# except jinja_errors.TemplateSyntaxError as error: # Also catches subclass of TemplateAssertionError +# error_msg = ( +# f"`E3020:` Jinja encountered a SyntaxError at line number {error.lineno}," +# f"check the template for invalid Jinja syntax.\nTemplate:\n{template}\n" +# f"Original Error: {error}" +# ) +# raise OnboardException(error_msg) from error +# # Intentionally not catching TemplateNotFound errors since template is passes as a string and not a filename +# except jinja_errors.TemplateError as error: # Catches all remaining Jinja errors +# error_msg = ( +# "`E3021:` Jinja encountered an unexpected TemplateError; check the template for correctness\n" +# f"Template:\n{template}\n" +# f"Original Error: {error}" +# ) +# raise OnboardException(error_msg) from error def load_yaml_datafile(filename): @@ -95,7 +99,7 @@ def extract_show_data(host, multi_result, command_getter_type): multi_result (multiResult): multiresult object from nornir command_getter_type (str): to know what dict to pull, device_onboarding or network_importer. 
""" - get_django_env() + jinja_env = get_django_env() host_platform = host.platform if host_platform == "cisco_xe": @@ -106,12 +110,18 @@ def extract_show_data(host, multi_result, command_getter_type): for default_dict_field, command_info in command_jpaths[command_getter_type].items(): if not default_dict_field == "use_textfsm": if command_info["command"] == multi_result[0].name: - j2_rendered_jpath = render_jinja_template(obj=host.name, template=command_info["jpath"]) + jpath_template = jinja_env.from_string(command_info["jpath"]) + j2_rendered_jpath = jpath_template.render({"obj": host.name}) + # j2_rendered_jpath = render_jinja_template(obj=host.name, template=command_info["jpath"]) extracted_value = extract_data_from_json(multi_result[0].result, j2_rendered_jpath) + # print(extracted_value) if command_info.get("post_processor"): - extracted_processed = render_jinja_template( - obj=extracted_value, template=command_info["post_processor"] - ) + template = jinja_env.from_string(command_info["post_processor"]) + extracted_processed = template.render({"obj": extracted_value}) + # extracted_processed = render_jinja_template( + # obj=extracted_value, template=command_info["post_processor"] + # ) + # print(extracted_processed) else: extracted_processed = extracted_value if isinstance(extracted_value, list) and len(extracted_value) == 1: diff --git a/nautobot_device_onboarding/utils/jinja_filters.py b/nautobot_device_onboarding/utils/jinja_filters.py new file mode 100755 index 00000000..e8ac5cb0 --- /dev/null +++ b/nautobot_device_onboarding/utils/jinja_filters.py @@ -0,0 +1,21 @@ +"""Filters for Jinja2 PostProcessing.""" + + +def fix_interfaces(interfaces): + """Prep interface formatting for SSoT.""" + for interface in interfaces: + for _, int_values in interface.items(): + int_values["type"] = "other" + int_values["802.1Q_mode"] = "" + int_values["untagged_vlan"] = "" + int_values["tagged_vlans"] = [] + int_values["lag"] = "" + int_values["ip_addresses"] = [] + 
int_values["ip_addresses"].append( + {"ip_address": int_values.get("ip_address", ""), "prefix_length": int_values.get("prefix_length", "")} + ) + if 'up' in int_values['link_status']: + int_values['link_status'] = True + else: + int_values['link_status'] = False + return interfaces From 2c7d70a6bab6beeeb5bcd69f18704c2328da52ce Mon Sep 17 00:00:00 2001 From: Jeff Kala Date: Thu, 22 Feb 2024 15:36:33 -0600 Subject: [PATCH 103/225] fix do for junos --- .../command_mappers/juniper_junos.yml | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/nautobot_device_onboarding/command_mappers/juniper_junos.yml b/nautobot_device_onboarding/command_mappers/juniper_junos.yml index 53f3be46..8f24913c 100755 --- a/nautobot_device_onboarding/command_mappers/juniper_junos.yml +++ b/nautobot_device_onboarding/command_mappers/juniper_junos.yml @@ -1,18 +1,18 @@ --- device_onboarding: - use_textfsm: false + use_textfsm: true hostname: - command: "show version | display json" - jpath: "'software-information'[0].'host-name'[0].data" + command: "show version" + jpath: "[*].hostname" serial: - command: "show version | display json" - jpath: "'chassis-inventory'[0].'chassis'[0].'serial-number'[0].data" + command: "show version" + jpath: "[*].serial_number" device_type: - command: "show version | display json" - jpath: "'software-information'[0].'product-model'[0].data" + command: "show chassis hardware" + jpath: "[*].model" mgmt_interface: command: "show interfaces" - jpath: "'interface-information'[0].'logical-interface'[0].'address-family'[0].'interface-address'[0].'ifa-local'[0].data" + jpath: "[?local=='{{ obj }}'].interface" mask_length: command: "show interfaces" jpath: "[?local=='{{ obj }}'].destination" From 69418b423897549f4a02e4b5cb63aa76832fe584 Mon Sep 17 00:00:00 2001 From: David Cates Date: Thu, 22 Feb 2024 16:19:29 -0700 Subject: [PATCH 104/225] add csv support wip --- .../diffsync/adapters/onboarding_adapters.py | 32 ++- .../diffsync/mock_data.py 
| 8 +- nautobot_device_onboarding/jobs.py | 252 ++++++++++++++---- nautobot_device_onboarding/utils/formatter.py | 1 + 4 files changed, 229 insertions(+), 64 deletions(-) diff --git a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py index e38d893b..7a8c5d44 100644 --- a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py @@ -164,17 +164,21 @@ def _handle_failed_devices(self, device_data): def execute_command_getter(self): """Start the CommandGetterDO job to query devices for data.""" - if self.job.platform: - if not self.job.platform.network_driver: - self.job.logger.error( - f"The selected platform, {self.job.platform} " - "does not have a network driver, please update the Platform." - ) - raise Exception("Platform.network_driver missing") # pylint: disable=broad-exception-raised + if not self.job.processed_csv_data: + if self.job.platform: + if not self.job.platform.network_driver: + self.job.logger.error( + f"The selected platform, {self.job.platform} " + "does not have a network driver, please update the Platform." 
+ ) + raise Exception("Platform.network_driver missing") # pylint: disable=broad-exception-raised command_getter_job = Job.objects.get(name="Command Getter for Device Onboarding") - job_kwargs = self.job.prepare_job_kwargs(self.job.job_result.task_kwargs) - kwargs = self.job.serialize_data(job_kwargs) + if self.job.processed_csv_data: + kwargs = self.job.job_result.task_kwargs + else: + job_kwargs = self.job.prepare_job_kwargs(self.job.job_result.task_kwargs) + kwargs = self.job.serialize_data(job_kwargs) result = JobResult.enqueue_job( job_model=command_getter_job, user=self.job.user, celery_kwargs=self.job.job_result.celery_kwargs, **kwargs ) @@ -268,14 +272,14 @@ def load_devices(self): onboarding_device = self.device( diffsync=self, device_type__model=self.device_data[ip_address]["device_type"], - location__name=self.job.location.name, + location__name=self.job.processed_csv_data[ip_address]["location"].name, name=self.device_data[ip_address]["hostname"], platform__name=self.device_data[ip_address]["platform"], primary_ip4__host=ip_address, - primary_ip4__status__name=self.job.ip_address_status.name, - role__name=self.job.device_role.name, - status__name=self.job.device_status.name, - secrets_group__name=self.job.secrets_group.name, + primary_ip4__status__name=self.job.processed_csv_data[ip_address]["ip_address_status"].name, + role__name=self.job.processed_csv_data[ip_address]["device_role"].name, + status__name=self.job.processed_csv_data[ip_address]["device_status"].name, + secrets_group__name=self.job.processed_csv_data[ip_address]["secrets_group"].name, interfaces=[self.device_data[ip_address]["mgmt_interface"]], mask_length=self.device_data[ip_address]["mask_length"], serial=self.device_data[ip_address]["serial"], diff --git a/nautobot_device_onboarding/diffsync/mock_data.py b/nautobot_device_onboarding/diffsync/mock_data.py index 57d08e61..ffaa19d3 100644 --- a/nautobot_device_onboarding/diffsync/mock_data.py +++ 
b/nautobot_device_onboarding/diffsync/mock_data.py @@ -98,7 +98,7 @@ "untagged_vlan": "", "tagged_vlans": [], } - } + }, ], }, "demo-cisco-xe2": { @@ -166,9 +166,9 @@ "untagged_vlan": "", "tagged_vlans": [], } - } - ] - } + }, + ], + }, } device_onboarding_mock_data = { diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py index 25890f26..9979649c 100755 --- a/nautobot_device_onboarding/jobs.py +++ b/nautobot_device_onboarding/jobs.py @@ -1,11 +1,14 @@ # pylint: disable=attribute-defined-outside-init """Device Onboarding Jobs.""" +import csv import logging +from io import StringIO from diffsync.enum import DiffSyncFlags from django.conf import settings -from nautobot.apps.jobs import BooleanVar, IntegerVar, Job, MultiObjectVar, ObjectVar, StringVar +from django.core.exceptions import ObjectDoesNotExist, ValidationError +from nautobot.apps.jobs import BooleanVar, FileVar, IntegerVar, Job, MultiObjectVar, ObjectVar, StringVar from nautobot.core.celery import register_jobs from nautobot.dcim.models import Device, DeviceType, Location, Platform from nautobot.extras.choices import SecretsGroupAccessTypeChoices, SecretsGroupSecretTypeChoices @@ -218,6 +221,8 @@ class SSOTDeviceOnboarding(DataSource): # pylint: disable=too-many-instance-att def __init__(self): """Initialize SSOTDeviceOnboarding.""" super().__init__() + self.processed_csv_data = {} + self.task_kwargs_csv_data = {} self.diffsync_flags = DiffSyncFlags.SKIP_UNMATCHED_DST class Meta: @@ -230,18 +235,23 @@ class Meta: default=False, description="Enable for more verbose logging.", ) + csv_file = FileVar( + label="CSV File", required=False, description="If a file is provided all other options will be ignored." 
+ ) location = ObjectVar( model=Location, + required=False, query_params={"content_type": "dcim.device"}, description="Assigned Location for all synced device(s)", ) - namespace = ObjectVar(model=Namespace, description="Namespace ip addresses belong to.") + namespace = ObjectVar(model=Namespace, required=False, description="Namespace ip addresses belong to.") ip_addresses = StringVar( + required=False, description="IP address of the device to sync, specify in a comma separated list for multiple devices.", label="IPv4 addresses", ) - port = IntegerVar(default=22) - timeout = IntegerVar(default=30) + port = IntegerVar(required=False, default=22) + timeout = IntegerVar(required=False, default=30) management_only_interface = BooleanVar( default=False, label="Set Management Only", @@ -255,30 +265,30 @@ class Meta: device_role = ObjectVar( model=Role, query_params={"content_types": "dcim.device"}, - required=True, + required=False, description="Role to be applied to all synced devices.", ) device_status = ObjectVar( model=Status, query_params={"content_types": "dcim.device"}, - required=True, + required=False, description="Status to be applied to all synced devices.", ) interface_status = ObjectVar( model=Status, query_params={"content_types": "dcim.interface"}, - required=True, + required=False, description="Status to be applied to all new synced device interfaces. This value does not update with additional syncs.", ) ip_address_status = ObjectVar( label="IP address status", model=Status, query_params={"content_types": "ipam.ipaddress"}, - required=True, + required=False, description="Status to be applied to all new synced IP addresses. This value does not update with additional syncs.", ) secrets_group = ObjectVar( - model=SecretsGroup, required=True, description="SecretsGroup for device connection credentials." + model=SecretsGroup, required=False, description="SecretsGroup for device connection credentials." 
) platform = ObjectVar( model=Platform, @@ -296,11 +306,116 @@ def load_target_adapter(self): self.target_adapter = OnboardingNautobotAdapter(job=self, sync=self.sync) self.target_adapter.load() + def _convert_sring_to_bool(self, string, header): + """Given a string of 'true' or 'false' convert to bool.""" + if string.lower() == "true": + return True + elif string.lower() == "false": + return False + else: + raise ValidationError( + f"'{string}' in column '{header}' failed to convert to a boolean value. " + "Please use either 'True' or 'False'." + ) + + def _process_csv_data(self, csv_file): + """ "Convert CSV data into a dictionary containing Nautobot objects.""" + self.logger.info("Decoding CSV file...") + decoded_csv_file = csv_file.read().decode("utf-8") + csv_reader = csv.DictReader(StringIO(decoded_csv_file)) + self.logger.info("Processing CSV data...") + processing_failed = False + processed_csv_data = {} + row_count = 1 + for row in csv_reader: + try: + query = f"device_role: {row.get('location_name')}" + if row.get("location_parent_name"): + location = Location.objects.get( + name=row["location_name"].strip(), parent__name=row["location_parent_name"].strip() + ) + else: + location = Location.objects.get(name=row["location_name"].strip(), parent=None) + query = f"device_role: {row.get('device_role_name')}" + device_role = Role.objects.get( + name=row["device_role_name"].strip(), + ) + query = f"device_status: {row.get('device_status_name')}" + device_status = Status.objects.get( + name=row["device_status_name"].strip(), + ) + query = f"interface_status: {row.get('interface_status_name')}" + interface_status = Status.objects.get( + name=row["interface_status_name"].strip(), + ) + query = f"ip_address_status: {row.get('ip_address_status_name')}" + ip_address_status = Status.objects.get( + name=row["ip_address_status_name"].strip(), + ) + query = f"secrets_group: {row.get('secrets_group_name')}" + secrets_group = SecretsGroup.objects.get( + 
name=row["secrets_group_name"].strip(), + ) + query = f"platform: {row.get('platform_name')}" + platform = None + if row.get("platform_name"): + platform = Platform.objects.get( + name=row["platform_name"].strip(), + ) + + set_mgmgt_only = self._convert_sring_to_bool( + string=row["set_mgmt_only"].lower().strip(), header="set_mgmt_only" + ) + update_devices_without_primary_ip = self._convert_sring_to_bool( + string=row["update_devices_without_primary_ip"].lower().strip(), + header="update_devices_without_primary_ip", + ) + + processed_csv_data[row["ip_address_host"]] = {} + processed_csv_data[row["ip_address_host"]]["location"] = location + processed_csv_data[row["ip_address_host"]]["port"] = row["port"].strip() + processed_csv_data[row["ip_address_host"]]["timeout"] = row["timeout"].strip() + processed_csv_data[row["ip_address_host"]]["set_mgmt_only"] = set_mgmgt_only + processed_csv_data[row["ip_address_host"]][ + "update_devices_without_primary_ip" + ] = update_devices_without_primary_ip + processed_csv_data[row["ip_address_host"]]["device_role"] = device_role + processed_csv_data[row["ip_address_host"]]["device_status"] = device_status + processed_csv_data[row["ip_address_host"]]["interface_status"] = interface_status + processed_csv_data[row["ip_address_host"]]["ip_address_status"] = ip_address_status + processed_csv_data[row["ip_address_host"]]["secrets_group"] = secrets_group + processed_csv_data[row["ip_address_host"]]["platform"] = platform + + # Prepare ids to send to the job in celery + self.task_kwargs_csv_data[row["ip_address_host"]] = {} + self.task_kwargs_csv_data[row["ip_address_host"]]["port"] = row["port"].strip() + self.task_kwargs_csv_data[row["ip_address_host"]]["timeout"] = row["timeout"].strip() + self.task_kwargs_csv_data[row["ip_address_host"]]["secrets_group"] = ( + secrets_group.id if secrets_group else "" + ) + self.task_kwargs_csv_data[row["ip_address_host"]]["platform"] = platform.id if platform else "" + row_count += 1 + except 
ObjectDoesNotExist as err: + self.logger.error(f"(row {sum([row_count, 1])}), {err} {query}") + processing_failed = True + except ValidationError as err: + self.logger.error(f"(row {sum([row_count, 1])}), {err}") + self.logger.error + row_count += 1 + if processing_failed: + processed_csv_data = None + if row_count <= 1: + self.logger.error("The CSV file is empty!") + processed_csv_data = None + + return processed_csv_data + def run( self, dryrun, memory_profiling, debug, + csv_file, location, namespace, ip_addresses, @@ -321,36 +436,53 @@ def run( self.dryrun = dryrun self.memory_profiling = memory_profiling self.debug = debug - self.location = location - self.namespace = namespace - self.ip_addresses = ip_addresses.replace(" ", "").split(",") - self.management_only_interface = management_only_interface - self.update_devices_without_primary_ip = update_devices_without_primary_ip - self.device_role = device_role - self.device_status = device_status - self.interface_status = interface_status - self.ip_address_status = ip_address_status - self.port = port - self.timeout = timeout - self.secrets_group = secrets_group - self.platform = platform - self.job_result.task_kwargs = { - "debug": debug, - "location": location, - "namespace": namespace, - "ip_addresses": ip_addresses, - "management_only_interface": management_only_interface, - "update_devices_without_primary_ip": update_devices_without_primary_ip, - "device_role": device_role, - "device_status": device_status, - "interface_status": interface_status, - "ip_address_status": ip_address_status, - "port": port, - "timeout": timeout, - "secrets_group": secrets_group, - "platform": platform, - } + if csv_file: + self.processed_csv_data = self._process_csv_data(csv_file=csv_file) + if self.processed_csv_data: + if self.debug: + self.logger.debug(self.processed_csv_data) + # create a list of ip addresses for processing in the adapter + self.ip_addresses = [] + for ip_address in self.processed_csv_data: + 
self.ip_addresses.append(ip_address) + # prepare the task_kwargs needed by the CommandGetterDO job + self.job_result.task_kwargs = {"debug": debug, "csv_file": self.task_kwargs_csv_data} + else: + raise ValidationError(mesage="CSV check failed. No devices will be onboarded.") + + else: + self.location = location + self.namespace = namespace + self.ip_addresses = ip_addresses.replace(" ", "").split(",") + self.management_only_interface = management_only_interface + self.update_devices_without_primary_ip = update_devices_without_primary_ip + self.device_role = device_role + self.device_status = device_status + self.interface_status = interface_status + self.ip_address_status = ip_address_status + self.port = port + self.timeout = timeout + self.secrets_group = secrets_group + self.platform = platform + + self.job_result.task_kwargs = { + "debug": debug, + "location": location, + "namespace": namespace, + "ip_addresses": ip_addresses, + "management_only_interface": management_only_interface, + "update_devices_without_primary_ip": update_devices_without_primary_ip, + "device_role": device_role, + "device_status": device_status, + "interface_status": interface_status, + "ip_address_status": ip_address_status, + "port": port, + "timeout": timeout, + "secrets_group": secrets_group, + "platform": platform, + "processed_csv_data": "", + } super().run(dryrun, memory_profiling, *args, **kwargs) @@ -509,6 +641,7 @@ class Meta: has_sensitive_variables = False hidden = False + csv_file = StringVar() debug = BooleanVar() ip_addresses = StringVar() port = IntegerVar() @@ -518,11 +651,17 @@ class Meta: def run(self, *args, **kwargs): """Process onboarding task from ssot-ni job.""" - self.ip_addresses = kwargs["ip_addresses"].replace(" ", "").split(",") - self.port = kwargs["port"] - self.timeout = kwargs["timeout"] - self.secrets_group = kwargs["secrets_group"] - self.platform = kwargs["platform"] + print(kwargs) + if kwargs["csv_file"]: + self.ip_addresses = [] + for ip_address in 
kwargs["csv_file"]: + self.ip_addresses.append(ip_address) + else: + self.ip_addresses = kwargs["ip_addresses"].replace(" ", "").split(",") + self.port = kwargs["port"] + self.timeout = kwargs["timeout"] + self.secrets_group = kwargs["secrets_group"] + self.platform = kwargs["platform"] # Initiate Nornir instance with empty inventory try: @@ -537,9 +676,30 @@ def run(self, *args, **kwargs): ) as nornir_obj: nr_with_processors = nornir_obj.with_processors([ProcessorDO(logger, compiled_results)]) for entered_ip in self.ip_addresses: - single_host_inventory_constructed = _set_inventory( - entered_ip, self.platform, self.port, self.secrets_group - ) + if kwargs["csv_file"]: + # get platform if one was provided via csv + platform = None + platform_id = kwargs["csv_file"][entered_ip]["platform"] + if platform_id: + platform = Platform.objects.get(id=platform_id) + + # get secrets group if one was provided via csv + secrets_group = None + secrets_group_id = kwargs["csv_file"][entered_ip]["secrets_group"] + if secrets_group_id: + secrets_group = SecretsGroup.objects.get(id=secrets_group_id) + + single_host_inventory_constructed = _set_inventory( + host_ip=entered_ip, + platform=platform, + port=kwargs["csv_file"][entered_ip]["port"], + secrets_group=secrets_group, + ) + else: + single_host_inventory_constructed = _set_inventory( + entered_ip, self.platform, self.port, self.secrets_group + ) + nr_with_processors.inventory.hosts.update(single_host_inventory_constructed) nr_with_processors.run(task=netmiko_send_commands, command_getter_job="device_onboarding") except Exception as err: # pylint: disable=broad-exception-caught diff --git a/nautobot_device_onboarding/utils/formatter.py b/nautobot_device_onboarding/utils/formatter.py index f5c5d91f..a7ad2f4c 100755 --- a/nautobot_device_onboarding/utils/formatter.py +++ b/nautobot_device_onboarding/utils/formatter.py @@ -9,6 +9,7 @@ from jinja2 import exceptions as jinja_errors from jinja2.sandbox import SandboxedEnvironment from 
nautobot.core.utils.data import render_jinja2 + from nautobot_device_onboarding.exceptions import OnboardException DATA_DIR = os.path.abspath(os.path.join(os.path.dirname(os.path.dirname(__file__)), "command_mappers")) From b6a8021417cbf768198a3a57f68c0a5b5cded7a2 Mon Sep 17 00:00:00 2001 From: David Cates Date: Thu, 22 Feb 2024 17:57:08 -0700 Subject: [PATCH 105/225] update csv import feature --- .../diffsync/models/onboarding_models.py | 112 +++++++++++++----- nautobot_device_onboarding/jobs.py | 16 ++- .../utils/diffsync_utils.py | 18 ++- 3 files changed, 112 insertions(+), 34 deletions(-) diff --git a/nautobot_device_onboarding/diffsync/models/onboarding_models.py b/nautobot_device_onboarding/diffsync/models/onboarding_models.py index 2b8cdbb6..9ef041e7 100644 --- a/nautobot_device_onboarding/diffsync/models/onboarding_models.py +++ b/nautobot_device_onboarding/diffsync/models/onboarding_models.py @@ -60,20 +60,25 @@ def _get_or_create_device(cls, platform, diffsync, ids, attrs): # device being created, but the primary ip address doesn't match an ip address entered, # the matching device will be updated or skipped based on user preference. 
- device = Device.objects.get( - name=ids["name"], - location=diffsync.job.location, + location = diffsync_utils.retrieve_submitted_value( + job=diffsync.job, ip_address=attrs["primary_ip4__host"], query_string="location" + ) + device = Device.objects.get(name=ids["name"], location=location) + update_devices_without_primary_ip = location = diffsync_utils.retrieve_submitted_value( + job=diffsync.job, + ip_address=attrs["primary_ip4__host"], + query_string="update_devices_without_primary_ip", ) - if diffsync.job.update_devices_without_primary_ip: + if update_devices_without_primary_ip: diffsync.job.logger.warning( - f"Device {ids['name']} at location {diffsync.job.location} already exists in Nautobot " + f"Device {ids['name']} at location {location} already exists in Nautobot " "but the primary ip address either does not exist, or doesn't match an entered ip address. " "This device will be updated." ) device = cls._update_device_with_attrs(device, platform, ids, attrs, diffsync) else: diffsync.job.logger.warning( - f"Device {ids['name']} at location {diffsync.job.location} already exists in Nautobot " + f"Device {ids['name']} at location {location} already exists in Nautobot " "but the primary ip address either does not exist, or doesn't match an entered ip address. " "IP Address, this device will be skipped." 
) @@ -82,13 +87,19 @@ def _get_or_create_device(cls, platform, diffsync, ids, attrs): except ObjectDoesNotExist: # Create Device device = Device( - location=diffsync.job.location, - status=diffsync.job.device_status, - role=diffsync.job.device_role, + location=location, + status=diffsync_utils.retrieve_submitted_value( + job=diffsync.job, ip_address=attrs["primary_ip4__host"], query_string="device_status" + ), + role=diffsync_utils.retrieve_submitted_value( + job=diffsync.job, ip_address=attrs["primary_ip4__host"], query_string="device_role" + ), device_type=DeviceType.objects.get(model=attrs["device_type__model"]), name=ids["name"], platform=platform, - secrets_group=diffsync.job.secrets_group, + secrets_group=diffsync_utils.retrieve_submitted_value( + job=diffsync.job, ip_address=attrs["primary_ip4__host"], query_string="secrets_group" + ), serial=ids["serial"], ) device.validated_save() @@ -107,8 +118,12 @@ def _get_or_create_interface(cls, diffsync, device, attrs): try: device_interface = Interface.objects.create( name=attrs["interfaces"][0], - mgmt_only=diffsync.job.management_only_interface, - status=diffsync.job.interface_status, + mgmt_only=diffsync_utils.retrieve_submitted_value( + job=diffsync.job, ip_address=attrs["primary_ip4__host"], query_string="set_mgmt_only" + ), + status=diffsync_utils.retrieve_submitted_value( + job=diffsync.job, ip_address=attrs["primary_ip4__host"], query_string="interface_status" + ), type=InterfaceTypeChoices.TYPE_OTHER, device=device, ) @@ -119,12 +134,28 @@ def _get_or_create_interface(cls, diffsync, device, attrs): @classmethod def _update_device_with_attrs(cls, device, platform, ids, attrs, diffsync): """Update a Nautobot device instance.""" - device.location = diffsync.job.location - device.status = diffsync.job.device_status - device.role = diffsync.job.device_role + device.location = ( + diffsync_utils.retrieve_submitted_value( + job=diffsync.job, ip_address=attrs["primary_ip4__host"], query_string="location" + ), + ) 
+ device.status = ( + diffsync_utils.retrieve_submitted_value( + job=diffsync.job, ip_address=attrs["primary_ip4__host"], query_string="device_status" + ), + ) + device.role = ( + diffsync_utils.retrieve_submitted_value( + job=diffsync.job, ip_address=attrs["primary_ip4__host"], query_string="device_role" + ), + ) device.device_type = DeviceType.objects.get(model=attrs["device_type__model"]) device.platform = platform - device.secrets_group = diffsync.job.secrets_group + device.secrets_group = ( + diffsync_utils.retrieve_submitted_value( + job=diffsync.job, ip_address=attrs["primary_ip4__host"], query_string="secrets_group" + ), + ) device.serial = ids["serial"] return device @@ -134,8 +165,17 @@ def create(cls, diffsync, ids, attrs): """Create a new nautobot device using data scraped from a device.""" # Determine device platform platform = None - if diffsync.job.platform: - platform = diffsync.job.platform + if diffsync_utils.retrieve_submitted_value( + job=diffsync.job, ip_address=attrs["primary_ip4__host"], query_string="platform" + ): + + platform = diffsync_utils.retrieve_submitted_value( + job=diffsync.job, ip_address=attrs["primary_ip4__host"], query_string="platform" + ) + + ip_address = attrs["primary_ip4__host"], latform = diffsync_utils.retrieve_submitted_value( + job=diffsync.job, ip_address=attrs["primary_ip4__host"], query_string="platform" + ) else: platform = Platform.objects.get(name=attrs["platform__name"]) @@ -145,9 +185,15 @@ def create(cls, diffsync, ids, attrs): ip_address = diffsync_utils.get_or_create_ip_address( host=attrs["primary_ip4__host"], mask_length=attrs["mask_length"], - namespace=diffsync.job.namespace, - default_ip_status=diffsync.job.ip_address_status, - default_prefix_status=diffsync.job.ip_address_status, + namespace=diffsync_utils.retrieve_submitted_value( + job=diffsync.job, ip_address=attrs["primary_ip4__host"], query_string="namespace" + ), + default_ip_status=diffsync_utils.retrieve_submitted_value( + job=diffsync.job, 
ip_address=attrs["primary_ip4__host"], query_string="ip_address_status" + ), + default_prefix_status=diffsync_utils.retrieve_submitted_value( + job=diffsync.job, ip_address=attrs["primary_ip4__host"], query_string="ip_address_status" + ), job=diffsync.job, ) interface = cls._get_or_create_interface(diffsync=diffsync, device=device, attrs=attrs) @@ -197,9 +243,15 @@ def update(self, attrs): ip_address = diffsync_utils.get_or_create_ip_address( host=attrs["primary_ip4__host"], mask_length=attrs["mask_length"], - namespace=self.diffsync.job.namespace, - default_ip_status=self.diffsync.job.ip_address_status, - default_prefix_status=self.diffsync.job.ip_address_status, + namespace=self.diffsync_utils.retrieve_submitted_value( + job=self.job, ip_address=attrs["primary_ip4__host"], query_string="namespace" + ), + default_ip_status=self.diffsync_utils.retrieve_submitted_value( + job=self.job, ip_address=attrs["primary_ip4__host"], query_string="ip_address_status" + ), + default_prefix_status=self.diffsync_utils.retrieve_submitted_value( + job=self.job, ip_address=attrs["primary_ip4__host"], query_string="ip_address_status" + ), job=self.diffsync.job, ) interface.ip_addresses.add(ip_address) @@ -241,9 +293,15 @@ def update(self, attrs): ip_address = diffsync_utils.get_or_create_ip_address( host=attrs["primary_ip4__host"], mask_length=attrs["mask_length"], - namespace=self.diffsync.job.namespace, - default_ip_status=self.diffsync.job.ip_address_status, - default_prefix_status=self.diffsync.job.ip_address_status, + namespace=self.diffsync_utils.retrieve_submitted_value( + job=self.job, ip_address=attrs["primary_ip4__host"], query_string="namespace" + ), + default_ip_status=self.diffsync_utils.retrieve_submitted_value( + job=self.job, ip_address=attrs["primary_ip4__host"], query_string="ip_address_status" + ), + default_prefix_status=self.diffsync_utils.retrieve_submitted_value( + job=self.job, ip_address=attrs["primary_ip4__host"], query_string="ip_address_status" + ), 
job=self.diffsync.job, ) interface = Interface.objects.get( diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py index 9979649c..7e90260a 100755 --- a/nautobot_device_onboarding/jobs.py +++ b/nautobot_device_onboarding/jobs.py @@ -11,6 +11,7 @@ from nautobot.apps.jobs import BooleanVar, FileVar, IntegerVar, Job, MultiObjectVar, ObjectVar, StringVar from nautobot.core.celery import register_jobs from nautobot.dcim.models import Device, DeviceType, Location, Platform +from nautobot.ipam.models import Namespace from nautobot.extras.choices import SecretsGroupAccessTypeChoices, SecretsGroupSecretTypeChoices from nautobot.extras.models import Role, SecretsGroup, SecretsGroupAssociation, Status, Tag from nautobot.ipam.models import Namespace @@ -236,7 +237,7 @@ class Meta: description="Enable for more verbose logging.", ) csv_file = FileVar( - label="CSV File", required=False, description="If a file is provided all other options will be ignored." + label="CSV File", required=False, description="If a file is provided all the options below will be ignored." ) location = ObjectVar( model=Location, @@ -252,7 +253,7 @@ class Meta: ) port = IntegerVar(required=False, default=22) timeout = IntegerVar(required=False, default=30) - management_only_interface = BooleanVar( + set_mgmt_only = BooleanVar( default=False, label="Set Management Only", description="If True, new interfaces that are created will be set to management only. 
If False, new interfaces will be set to not be management only.", @@ -340,6 +341,10 @@ def _process_csv_data(self, csv_file): device_role = Role.objects.get( name=row["device_role_name"].strip(), ) + query = f"namespace: {row.get('namespace')}" + namespace = Namespace.objects.get( + name=row["namespace"].strip(), + ) query = f"device_status: {row.get('device_status_name')}" device_status = Status.objects.get( name=row["device_status_name"].strip(), @@ -373,6 +378,7 @@ def _process_csv_data(self, csv_file): processed_csv_data[row["ip_address_host"]] = {} processed_csv_data[row["ip_address_host"]]["location"] = location + processed_csv_data[row["ip_address_host"]]["namespace"] = namespace processed_csv_data[row["ip_address_host"]]["port"] = row["port"].strip() processed_csv_data[row["ip_address_host"]]["timeout"] = row["timeout"].strip() processed_csv_data[row["ip_address_host"]]["set_mgmt_only"] = set_mgmgt_only @@ -419,7 +425,7 @@ def run( location, namespace, ip_addresses, - management_only_interface, + set_mgmt_only, update_devices_without_primary_ip, device_role, device_status, @@ -455,7 +461,7 @@ def run( self.location = location self.namespace = namespace self.ip_addresses = ip_addresses.replace(" ", "").split(",") - self.management_only_interface = management_only_interface + self.set_mgmt_only = set_mgmt_only self.update_devices_without_primary_ip = update_devices_without_primary_ip self.device_role = device_role self.device_status = device_status @@ -471,7 +477,7 @@ def run( "location": location, "namespace": namespace, "ip_addresses": ip_addresses, - "management_only_interface": management_only_interface, + "set_mgmt_only": set_mgmt_only, "update_devices_without_primary_ip": update_devices_without_primary_ip, "device_role": device_role, "device_status": device_status, diff --git a/nautobot_device_onboarding/utils/diffsync_utils.py b/nautobot_device_onboarding/utils/diffsync_utils.py index 4c927efe..2e86d723 100644 --- 
a/nautobot_device_onboarding/utils/diffsync_utils.py +++ b/nautobot_device_onboarding/utils/diffsync_utils.py @@ -30,7 +30,7 @@ def check_data_type(data): def get_or_create_prefix(host, mask_length, default_status, namespace, job=None): - """Attempt to get a Nautobot Prefix, create a new one if necessary.""" + """Attempt to get a Nautobot Prefix, and create a new one if necessary.""" prefix = None new_network = ipaddress.ip_interface(f"{host}/{mask_length}") try: @@ -54,7 +54,7 @@ def get_or_create_prefix(host, mask_length, default_status, namespace, job=None) def get_or_create_ip_address(host, mask_length, namespace, default_ip_status, default_prefix_status, job=None): - """Attempt to get a Nautobot IPAddress, create a new one if necessary.""" + """Attempt to get a Nautobot IPAddress, and create a new one if necessary.""" ip_address = None try: @@ -88,3 +88,17 @@ def get_or_create_ip_address(host, mask_length, namespace, default_ip_status, de if job: job.logger.error(f"IP Address {host} failed to create, {err}") return ip_address + + +def retrieve_submitted_value(job, ip_address, query_string): + """ + Check for a submitted CSV file and retrieve a the appropriate user submitted value. + + If a user has submitted a CSV file, return the relevant value based on the data + that was parsed when the file was loaded. If a CSV file has not been submitted, + return the value input into the job form. 
+ """ + if job.processed_csv_data: + return job.processed_csv_data[ip_address][query_string] + else: + getattr(job, "query_string") From 2915b51068f0d6bc42b548ee517c456744f92aa2 Mon Sep 17 00:00:00 2001 From: David Cates Date: Fri, 23 Feb 2024 08:03:43 -0700 Subject: [PATCH 106/225] update csv support --- nautobot_device_onboarding/jobs.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py index 7e90260a..c4af2876 100755 --- a/nautobot_device_onboarding/jobs.py +++ b/nautobot_device_onboarding/jobs.py @@ -404,6 +404,7 @@ def _process_csv_data(self, csv_file): except ObjectDoesNotExist as err: self.logger.error(f"(row {sum([row_count, 1])}), {err} {query}") processing_failed = True + row_count += 1 except ValidationError as err: self.logger.error(f"(row {sum([row_count, 1])}), {err}") self.logger.error @@ -455,7 +456,7 @@ def run( # prepare the task_kwargs needed by the CommandGetterDO job self.job_result.task_kwargs = {"debug": debug, "csv_file": self.task_kwargs_csv_data} else: - raise ValidationError(mesage="CSV check failed. No devices will be onboarded.") + raise ValidationError(message="CSV check failed. 
No devices will be onboarded.") else: self.location = location From cb4d10c4b9d99be5dd4e56e740d55943e7ab68d3 Mon Sep 17 00:00:00 2001 From: David Cates Date: Fri, 23 Feb 2024 09:36:10 -0700 Subject: [PATCH 107/225] update csv support --- .../diffsync/adapters/onboarding_adapters.py | 26 +++++++++++++++++++ .../diffsync/models/onboarding_models.py | 2 ++ nautobot_device_onboarding/jobs.py | 13 +++++----- 3 files changed, 34 insertions(+), 7 deletions(-) diff --git a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py index 7a8c5d44..1921a7d6 100644 --- a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py @@ -1,10 +1,14 @@ """DiffSync adapters.""" import time +from collections import defaultdict +from typing import Dict, FrozenSet, Hashable, Tuple, Type import diffsync import netaddr +from django.contrib.contenttypes.models import ContentType from django.core.exceptions import ValidationError +from django.db.models import Model from nautobot.apps.choices import JobResultStatusChoices from nautobot.dcim.models import Device, DeviceType, Manufacturer, Platform from nautobot.extras.models import Job, JobResult @@ -12,6 +16,8 @@ from nautobot_device_onboarding.diffsync.models import onboarding_models from nautobot_device_onboarding.utils import diffsync_utils +ParameterSet = FrozenSet[Tuple[str, Hashable]] + class OnboardingNautobotAdapter(diffsync.DiffSync): """Adapter for loading Nautobot data.""" @@ -28,6 +34,26 @@ def __init__(self, job, sync, *args, **kwargs): super().__init__(*args, **kwargs) self.job = job self.sync = sync + self.invalidate_cache() + + def invalidate_cache(self, zero_out_hits=True): + """Invalidates all the objects in the ORM cache.""" + self._cache = defaultdict(dict) + if zero_out_hits: + self._cache_hits = defaultdict(int) + + def get_from_orm_cache(self, parameters: 
Dict, model_class: Type[Model]): + """Retrieve an object from the ORM or the cache.""" + parameter_set = frozenset(parameters.items()) + content_type = ContentType.objects.get_for_model(model_class) + model_cache_key = f"{content_type.app_label}.{content_type.model}" + if cached_object := self._cache[model_cache_key].get(parameter_set): + self._cache_hits[model_cache_key] += 1 + return cached_object + # As we are using `get` here, this will error if there is not exactly one object that corresponds to the + # parameter set. We intentionally pass these errors through. + self._cache[model_cache_key][parameter_set] = model_class.objects.get(**dict(parameter_set)) + return self._cache[model_cache_key][parameter_set] def load_manufacturers(self): """Load manufacturer data from Nautobot.""" diff --git a/nautobot_device_onboarding/diffsync/models/onboarding_models.py b/nautobot_device_onboarding/diffsync/models/onboarding_models.py index 9ef041e7..4c783b3e 100644 --- a/nautobot_device_onboarding/diffsync/models/onboarding_models.py +++ b/nautobot_device_onboarding/diffsync/models/onboarding_models.py @@ -163,6 +163,8 @@ def _update_device_with_attrs(cls, device, platform, ids, attrs, diffsync): @classmethod def create(cls, diffsync, ids, attrs): """Create a new nautobot device using data scraped from a device.""" + if diffsync.job.debug: + diffsync.job.debug.logger.debug("Creating device {ids}") # Determine device platform platform = None if diffsync_utils.retrieve_submitted_value( diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py index c4af2876..291d0f12 100755 --- a/nautobot_device_onboarding/jobs.py +++ b/nautobot_device_onboarding/jobs.py @@ -11,7 +11,6 @@ from nautobot.apps.jobs import BooleanVar, FileVar, IntegerVar, Job, MultiObjectVar, ObjectVar, StringVar from nautobot.core.celery import register_jobs from nautobot.dcim.models import Device, DeviceType, Location, Platform -from nautobot.ipam.models import Namespace from 
nautobot.extras.choices import SecretsGroupAccessTypeChoices, SecretsGroupSecretTypeChoices from nautobot.extras.models import Role, SecretsGroup, SecretsGroupAssociation, Status, Tag from nautobot.ipam.models import Namespace @@ -380,7 +379,7 @@ def _process_csv_data(self, csv_file): processed_csv_data[row["ip_address_host"]]["location"] = location processed_csv_data[row["ip_address_host"]]["namespace"] = namespace processed_csv_data[row["ip_address_host"]]["port"] = row["port"].strip() - processed_csv_data[row["ip_address_host"]]["timeout"] = row["timeout"].strip() + processed_csv_data[row["ip_address_host"]]["timeout"] = int(row["timeout"].strip()) processed_csv_data[row["ip_address_host"]]["set_mgmt_only"] = set_mgmgt_only processed_csv_data[row["ip_address_host"]][ "update_devices_without_primary_ip" @@ -648,11 +647,11 @@ class Meta: has_sensitive_variables = False hidden = False - csv_file = StringVar() - debug = BooleanVar() - ip_addresses = StringVar() - port = IntegerVar() - timeout = IntegerVar() + csv_file = StringVar(required=False) + debug = BooleanVar(required=False) + ip_addresses = StringVar(required=False) + port = IntegerVar(required=False) + timeout = IntegerVar(required=False) secrets_group = ObjectVar(model=SecretsGroup) platform = ObjectVar(model=Platform, required=False) From 76a253da64067ef8887557b13e13fc2e9156d5d3 Mon Sep 17 00:00:00 2001 From: Jeff Kala Date: Fri, 23 Feb 2024 11:02:14 -0600 Subject: [PATCH 108/225] fix dup ip address extraction --- nautobot_device_onboarding/command_mappers/cisco_ios.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/nautobot_device_onboarding/command_mappers/cisco_ios.yml b/nautobot_device_onboarding/command_mappers/cisco_ios.yml index c42ce447..1f720973 100755 --- a/nautobot_device_onboarding/command_mappers/cisco_ios.yml +++ b/nautobot_device_onboarding/command_mappers/cisco_ios.yml @@ -13,10 +13,12 @@ device_onboarding: jpath: "[*].hardware[0]" mgmt_interface: command: "show 
interfaces" - jpath: "[?ip_address=='{{ obj }}'].interface" + jpath: "[?ip_address=='{{ obj }}'].{name: interface, enabled: link_status}" + post_processor: "{% for interface in obj %}{% if 'up' in interface['enabled'] %}{{ [interface.name] }}{% endif %}{% endfor %}" mask_length: command: "show interfaces" jpath: "[?ip_address=='{{ obj }}'].prefix_length" + post_processor: "{{ obj | unique | list }}" network_importer: use_textfsm: true serial: From bcd179fc94022932fb9c7a1b8f6365cc9ad08be6 Mon Sep 17 00:00:00 2001 From: Jeff Kala Date: Fri, 23 Feb 2024 11:07:55 -0600 Subject: [PATCH 109/225] fix mgmt_interface from list to string --- nautobot_device_onboarding/command_mappers/cisco_ios.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nautobot_device_onboarding/command_mappers/cisco_ios.yml b/nautobot_device_onboarding/command_mappers/cisco_ios.yml index 1f720973..49ff0004 100755 --- a/nautobot_device_onboarding/command_mappers/cisco_ios.yml +++ b/nautobot_device_onboarding/command_mappers/cisco_ios.yml @@ -14,11 +14,11 @@ device_onboarding: mgmt_interface: command: "show interfaces" jpath: "[?ip_address=='{{ obj }}'].{name: interface, enabled: link_status}" - post_processor: "{% for interface in obj %}{% if 'up' in interface['enabled'] %}{{ [interface.name] }}{% endif %}{% endfor %}" + post_processor: "{% for interface in obj %}{% if 'up' in interface['enabled'] %}{{ interface.name }}{% endif %}{% endfor %}" mask_length: command: "show interfaces" jpath: "[?ip_address=='{{ obj }}'].prefix_length" - post_processor: "{{ obj | unique | list }}" + post_processor: "{{ obj | unique | first }}" network_importer: use_textfsm: true serial: From d47db2e663d18a70991871bd4563ee7eade2c100 Mon Sep 17 00:00:00 2001 From: David Cates Date: Fri, 23 Feb 2024 10:55:18 -0700 Subject: [PATCH 110/225] update csv support --- .../adapters/network_importer_adapters.py | 2 +- .../diffsync/adapters/onboarding_adapters.py | 8 +++-- nautobot_device_onboarding/jobs.py | 
31 +++++++++++++++++-- 3 files changed, 35 insertions(+), 6 deletions(-) diff --git a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py index b14f3d16..2d475f19 100644 --- a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py @@ -409,7 +409,7 @@ def load_lag_to_interface(self): def load(self): """Load network data.""" - # self.execute_command_getter() + self.execute_command_getter() self.load_ip_addresses() if self.job.sync_vlans: self.load_vlans() diff --git a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py index 1921a7d6..76114d0b 100644 --- a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py @@ -2,7 +2,7 @@ import time from collections import defaultdict -from typing import Dict, FrozenSet, Hashable, Tuple, Type +from typing import Dict, FrozenSet, DefaultDict, Hashable, Tuple, Type import diffsync import netaddr @@ -29,6 +29,10 @@ class OnboardingNautobotAdapter(diffsync.DiffSync): top_level = ["manufacturer", "platform", "device_type", "device"] + # This dictionary acts as an ORM cache. 
+ _cache: DefaultDict[str, Dict[ParameterSet, Model]] + _cache_hits: DefaultDict[str, int] = defaultdict(int) + def __init__(self, job, sync, *args, **kwargs): """Initialize the OnboardingNautobotAdapter.""" super().__init__(*args, **kwargs) @@ -307,7 +311,7 @@ def load_devices(self): status__name=self.job.processed_csv_data[ip_address]["device_status"].name, secrets_group__name=self.job.processed_csv_data[ip_address]["secrets_group"].name, interfaces=[self.device_data[ip_address]["mgmt_interface"]], - mask_length=self.device_data[ip_address]["mask_length"], + mask_length=int(self.device_data[ip_address]["mask_length"]), serial=self.device_data[ip_address]["serial"], ) # type: ignore try: diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py index 291d0f12..eba53af2 100755 --- a/nautobot_device_onboarding/jobs.py +++ b/nautobot_device_onboarding/jobs.py @@ -378,7 +378,7 @@ def _process_csv_data(self, csv_file): processed_csv_data[row["ip_address_host"]] = {} processed_csv_data[row["ip_address_host"]]["location"] = location processed_csv_data[row["ip_address_host"]]["namespace"] = namespace - processed_csv_data[row["ip_address_host"]]["port"] = row["port"].strip() + processed_csv_data[row["ip_address_host"]]["port"] = int(row["port"].strip()) processed_csv_data[row["ip_address_host"]]["timeout"] = int(row["timeout"].strip()) processed_csv_data[row["ip_address_host"]]["set_mgmt_only"] = set_mgmgt_only processed_csv_data[row["ip_address_host"]][ @@ -393,8 +393,8 @@ def _process_csv_data(self, csv_file): # Prepare ids to send to the job in celery self.task_kwargs_csv_data[row["ip_address_host"]] = {} - self.task_kwargs_csv_data[row["ip_address_host"]]["port"] = row["port"].strip() - self.task_kwargs_csv_data[row["ip_address_host"]]["timeout"] = row["timeout"].strip() + self.task_kwargs_csv_data[row["ip_address_host"]]["port"] = int(row["port"].strip()) + self.task_kwargs_csv_data[row["ip_address_host"]]["timeout"] = 
int(row["timeout"].strip()) self.task_kwargs_csv_data[row["ip_address_host"]]["secrets_group"] = ( secrets_group.id if secrets_group else "" ) @@ -458,6 +458,31 @@ def run( raise ValidationError(message="CSV check failed. No devices will be onboarded.") else: + # Verify that all requried form inputs have been provided + required_inputs = { + "location": location, + "namespace": namespace, + "ip_addresses": ip_addresses, + "device_role": device_role, + "device_status": device_status, + "interface_status": interface_status, + "ip_address_status": ip_address_status, + "port": port, + "timeout": timeout, + "secrets_group": secrets_group + } + # missing_required_inputs = [] + # for form_field, input_value in required_inputs: + # if not input_value: + # missing_required_inputs.append(form_field) + + missing_required_inputs = [form_field for form_field, input_value in required_inputs.items() if not input_value] + if not missing_required_inputs: + pass + else: + self.logger.error(f"Missing requried inputs from job form: {missing_required_inputs}") + raise ValidationError(message=f"Missing required inputs {missing_required_inputs}") + self.location = location self.namespace = namespace self.ip_addresses = ip_addresses.replace(" ", "").split(",") From bea98a28d19795b1dc13d545fae439d6c2fdb176 Mon Sep 17 00:00:00 2001 From: David Cates Date: Fri, 23 Feb 2024 11:13:17 -0700 Subject: [PATCH 111/225] add orm caching method to onboarding adapter --- .../adapters/network_importer_adapters.py | 2 +- .../diffsync/adapters/onboarding_adapters.py | 30 +++++++++++++++++++ 2 files changed, 31 insertions(+), 1 deletion(-) diff --git a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py index b14f3d16..2d475f19 100644 --- a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py @@ -409,7 +409,7 
@@ def load_lag_to_interface(self): def load(self): """Load network data.""" - # self.execute_command_getter() + self.execute_command_getter() self.load_ip_addresses() if self.job.sync_vlans: self.load_vlans() diff --git a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py index e38d893b..e72d3f98 100644 --- a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py @@ -1,10 +1,14 @@ """DiffSync adapters.""" import time +from collections import defaultdict +from typing import Dict, FrozenSet, DefaultDict, Hashable, Tuple, Type import diffsync import netaddr +from django.contrib.contenttypes.models import ContentType from django.core.exceptions import ValidationError +from django.db.models import Model from nautobot.apps.choices import JobResultStatusChoices from nautobot.dcim.models import Device, DeviceType, Manufacturer, Platform from nautobot.extras.models import Job, JobResult @@ -12,6 +16,8 @@ from nautobot_device_onboarding.diffsync.models import onboarding_models from nautobot_device_onboarding.utils import diffsync_utils +ParameterSet = FrozenSet[Tuple[str, Hashable]] + class OnboardingNautobotAdapter(diffsync.DiffSync): """Adapter for loading Nautobot data.""" @@ -23,11 +29,35 @@ class OnboardingNautobotAdapter(diffsync.DiffSync): top_level = ["manufacturer", "platform", "device_type", "device"] + # This dictionary acts as an ORM cache. 
+ _cache: DefaultDict[str, Dict[ParameterSet, Model]] + _cache_hits: DefaultDict[str, int] = defaultdict(int) + def __init__(self, job, sync, *args, **kwargs): """Initialize the OnboardingNautobotAdapter.""" super().__init__(*args, **kwargs) self.job = job self.sync = sync + self.invalidate_cache() + + def invalidate_cache(self, zero_out_hits=True): + """Invalidates all the objects in the ORM cache.""" + self._cache = defaultdict(dict) + if zero_out_hits: + self._cache_hits = defaultdict(int) + + def get_from_orm_cache(self, parameters: Dict, model_class: Type[Model]): + """Retrieve an object from the ORM or the cache.""" + parameter_set = frozenset(parameters.items()) + content_type = ContentType.objects.get_for_model(model_class) + model_cache_key = f"{content_type.app_label}.{content_type.model}" + if cached_object := self._cache[model_cache_key].get(parameter_set): + self._cache_hits[model_cache_key] += 1 + return cached_object + # As we are using `get` here, this will error if there is not exactly one object that corresponds to the + # parameter set. We intentionally pass these errors through. 
+ self._cache[model_cache_key][parameter_set] = model_class.objects.get(**dict(parameter_set)) + return self._cache[model_cache_key][parameter_set] def load_manufacturers(self): """Load manufacturer data from Nautobot.""" From 0476cada1c8e1f8289d6ae1db3809bb92436dae9 Mon Sep 17 00:00:00 2001 From: David Cates Date: Fri, 23 Feb 2024 11:31:10 -0700 Subject: [PATCH 112/225] update csv support --- nautobot_device_onboarding/jobs.py | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py index eba53af2..639c444c 100755 --- a/nautobot_device_onboarding/jobs.py +++ b/nautobot_device_onboarding/jobs.py @@ -471,11 +471,7 @@ def run( "timeout": timeout, "secrets_group": secrets_group } - # missing_required_inputs = [] - # for form_field, input_value in required_inputs: - # if not input_value: - # missing_required_inputs.append(form_field) - + missing_required_inputs = [form_field for form_field, input_value in required_inputs.items() if not input_value] if not missing_required_inputs: pass @@ -512,7 +508,7 @@ def run( "timeout": timeout, "secrets_group": secrets_group, "platform": platform, - "processed_csv_data": "", + "csv_file": "", } super().run(dryrun, memory_profiling, *args, **kwargs) From bde59f6cf5e84310d632d38c0d90a9f3d0a57898 Mon Sep 17 00:00:00 2001 From: David Cates Date: Fri, 23 Feb 2024 11:33:03 -0700 Subject: [PATCH 113/225] remove print statement --- nautobot_device_onboarding/jobs.py | 1 - 1 file changed, 1 deletion(-) diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py index 639c444c..831e7529 100755 --- a/nautobot_device_onboarding/jobs.py +++ b/nautobot_device_onboarding/jobs.py @@ -678,7 +678,6 @@ class Meta: def run(self, *args, **kwargs): """Process onboarding task from ssot-ni job.""" - print(kwargs) if kwargs["csv_file"]: self.ip_addresses = [] for ip_address in kwargs["csv_file"]: From db7165b1ff0417f79a418f059cdf95002f1d49eb 
Mon Sep 17 00:00:00 2001 From: Jeff Kala Date: Fri, 23 Feb 2024 12:58:02 -0600 Subject: [PATCH 114/225] send interfaces in ios as json --- nautobot_device_onboarding/command_mappers/cisco_ios.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nautobot_device_onboarding/command_mappers/cisco_ios.yml b/nautobot_device_onboarding/command_mappers/cisco_ios.yml index 49ff0004..8497f36d 100755 --- a/nautobot_device_onboarding/command_mappers/cisco_ios.yml +++ b/nautobot_device_onboarding/command_mappers/cisco_ios.yml @@ -27,4 +27,4 @@ network_importer: interfaces: command: "show interfaces" jpath: "[*].[$interface$,hardware_type,ip_address,prefix_length,mac_address,mtu,description,link_status,vlan_id,vlan_id_inner,vlan_id_outer]" - post_processor: "{{ obj | fix_interfaces }}" + post_processor: "{{ obj | fix_interfaces | tojson }}" From 0bdd25dda1d134d7a49e6939832ce40718ed72e9 Mon Sep 17 00:00:00 2001 From: David Cates Date: Fri, 23 Feb 2024 13:57:26 -0700 Subject: [PATCH 115/225] update csv support --- .../diffsync/adapters/onboarding_adapters.py | 32 +++++++++++++++---- .../diffsync/models/onboarding_models.py | 24 +++----------- .../utils/diffsync_utils.py | 5 ++- 3 files changed, 35 insertions(+), 26 deletions(-) diff --git a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py index 76114d0b..2563af56 100644 --- a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py @@ -299,17 +299,37 @@ def load_devices(self): try: if self.job.debug: self.job.logger.debug(f"loading device data for {ip_address}") + + location = diffsync_utils.retrieve_submitted_value( + job=self.job, ip_address=ip_address, query_string="location" + ) + platform = diffsync_utils.retrieve_submitted_value( + job=self.job, ip_address=ip_address, query_string="platform" + ) + primary_ip4__status = 
diffsync_utils.retrieve_submitted_value( + job=self.job, ip_address=ip_address, query_string="ip_address_status" + ) + device_role = diffsync_utils.retrieve_submitted_value( + job=self.job, ip_address=ip_address, query_string="device_role" + ) + device_status = diffsync_utils.retrieve_submitted_value( + job=self.job, ip_address=ip_address, query_string="device_status" + ) + secrets_group = diffsync_utils.retrieve_submitted_value( + job=self.job, ip_address=ip_address, query_string="secrets_group" + ) + onboarding_device = self.device( diffsync=self, device_type__model=self.device_data[ip_address]["device_type"], - location__name=self.job.processed_csv_data[ip_address]["location"].name, + location__name=location.name, name=self.device_data[ip_address]["hostname"], - platform__name=self.device_data[ip_address]["platform"], + platform__name=platform.name if platform else self.device_data[ip_address]["platform"], primary_ip4__host=ip_address, - primary_ip4__status__name=self.job.processed_csv_data[ip_address]["ip_address_status"].name, - role__name=self.job.processed_csv_data[ip_address]["device_role"].name, - status__name=self.job.processed_csv_data[ip_address]["device_status"].name, - secrets_group__name=self.job.processed_csv_data[ip_address]["secrets_group"].name, + primary_ip4__status__name=primary_ip4__status.name, + role__name=device_role.name, + status__name=device_status.name, + secrets_group__name=secrets_group.name, interfaces=[self.device_data[ip_address]["mgmt_interface"]], mask_length=int(self.device_data[ip_address]["mask_length"]), serial=self.device_data[ip_address]["serial"], diff --git a/nautobot_device_onboarding/diffsync/models/onboarding_models.py b/nautobot_device_onboarding/diffsync/models/onboarding_models.py index 4c783b3e..c7b7f6d6 100644 --- a/nautobot_device_onboarding/diffsync/models/onboarding_models.py +++ b/nautobot_device_onboarding/diffsync/models/onboarding_models.py @@ -51,7 +51,7 @@ class OnboardingDevice(DiffSyncModel): 
interfaces: Optional[list] @classmethod - def _get_or_create_device(cls, platform, diffsync, ids, attrs): + def _get_or_create_device(cls, diffsync, ids, attrs): """Attempt to get a Device, create a new one if necessary.""" device = None try: @@ -63,6 +63,7 @@ def _get_or_create_device(cls, platform, diffsync, ids, attrs): location = diffsync_utils.retrieve_submitted_value( job=diffsync.job, ip_address=attrs["primary_ip4__host"], query_string="location" ) + platform = Platform.objects.get(name=attrs["platform__name"]) device = Device.objects.get(name=ids["name"], location=location) update_devices_without_primary_ip = location = diffsync_utils.retrieve_submitted_value( job=diffsync.job, @@ -164,25 +165,10 @@ def _update_device_with_attrs(cls, device, platform, ids, attrs, diffsync): def create(cls, diffsync, ids, attrs): """Create a new nautobot device using data scraped from a device.""" if diffsync.job.debug: - diffsync.job.debug.logger.debug("Creating device {ids}") - # Determine device platform - platform = None - if diffsync_utils.retrieve_submitted_value( - job=diffsync.job, ip_address=attrs["primary_ip4__host"], query_string="platform" - ): - - platform = diffsync_utils.retrieve_submitted_value( - job=diffsync.job, ip_address=attrs["primary_ip4__host"], query_string="platform" - ) - - ip_address = attrs["primary_ip4__host"], latform = diffsync_utils.retrieve_submitted_value( - job=diffsync.job, ip_address=attrs["primary_ip4__host"], query_string="platform" - ) - else: - platform = Platform.objects.get(name=attrs["platform__name"]) - + diffsync.job.logger.debug("Creating device {ids} with {attrs}") + # Get or create Device, Interface and IP Address - device = cls._get_or_create_device(platform, diffsync, ids, attrs) + device = cls._get_or_create_device(diffsync, ids, attrs) if device: ip_address = diffsync_utils.get_or_create_ip_address( host=attrs["primary_ip4__host"], diff --git a/nautobot_device_onboarding/utils/diffsync_utils.py 
b/nautobot_device_onboarding/utils/diffsync_utils.py index 2e86d723..acb1de07 100644 --- a/nautobot_device_onboarding/utils/diffsync_utils.py +++ b/nautobot_device_onboarding/utils/diffsync_utils.py @@ -98,7 +98,10 @@ def retrieve_submitted_value(job, ip_address, query_string): that was parsed when the file was loaded. If a CSV file has not been submitted, return the value input into the job form. """ + print(vars(job)) + if job.debug: + job.logger.debug(f"Retrieving {query_string} for {ip_address}") if job.processed_csv_data: return job.processed_csv_data[ip_address][query_string] else: - getattr(job, "query_string") + return getattr(job, query_string) From d581d08085f46d24cf740123ff2ae4c9186de6f4 Mon Sep 17 00:00:00 2001 From: David Cates Date: Fri, 23 Feb 2024 14:19:38 -0700 Subject: [PATCH 116/225] bump version --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 2d6cb2fc..4bed9f36 100755 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "nautobot-device-onboarding" -version = "3.0.2a3" +version = "3.0.2a4" description = "A app for Nautobot to easily onboard new devices." 
authors = ["Network to Code, LLC "] license = "Apache-2.0" From 090ad0875beaa0446f8d4ff558804b7e996310a7 Mon Sep 17 00:00:00 2001 From: David Cates Date: Fri, 23 Feb 2024 15:38:10 -0700 Subject: [PATCH 117/225] update NI --- .../adapters/network_importer_adapters.py | 86 ++++++++++--------- nautobot_device_onboarding/jobs.py | 2 +- 2 files changed, 47 insertions(+), 41 deletions(-) diff --git a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py index 2d475f19..d0cf919e 100644 --- a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py @@ -1,7 +1,7 @@ """DiffSync adapters.""" import time - +import json import diffsync from diffsync.enum import DiffSyncModelFlags from django.core.exceptions import ValidationError @@ -63,11 +63,12 @@ def load_ip_addresses(self): """ ip_address_hosts = set() for _, device_data in self.job.command_getter_result.items(): - for interface in device_data["interfaces"]: + for interface in json.loads(device_data["interfaces"]): for _, interface_data in interface.items(): for ip_address in interface_data["ip_addresses"]: - ip_address_hosts.add(ip_address["host"]) - + if ip_address: + ip_address_hosts.add(ip_address["ip_address"]) + ip_address_hosts.remove('') # do not attempt to filter ip addresses with empty strings for ip_address in IPAddress.objects.filter( host__in=ip_address_hosts, parent__namespace__name=self.job.namespace.name, @@ -260,7 +261,10 @@ def execute_command_getter(self): def _process_mac_address(self, mac_address): """Convert a mac address to match the value stored by Nautobot.""" - return str(EUI(mac_address, version=48, dialect=MacUnixExpandedUppercase)) + if mac_address: + return str(EUI(mac_address, version=48, dialect=MacUnixExpandedUppercase)) + else: + return "" def load_devices(self): """Load devices into the DiffSync 
store.""" @@ -269,7 +273,7 @@ def load_devices(self): self.add(network_device) if self.job.debug: self.job.logger.debug(f"Device {network_device} loaded.") - for interface in device_data["interfaces"]: + for interface in json.loads(device_data["interfaces"]): for interface_name, interface_data in interface.items(): network_interface = self.load_interface(hostname, interface_name, interface_data) network_device.add_child(network_interface) @@ -285,9 +289,9 @@ def load_interface(self, hostname, interface_name, interface_data): status__name=self.job.interface_status.name, type=interface_data["type"], mac_address=self._process_mac_address(interface_data["mac_address"]), - mtu=interface_data["mtu"], + mtu=interface_data["mtu"] if interface_data['mtu'] else 1500, description=interface_data["description"], - enabled=interface_data["enabled"], + enabled=interface_data["link_status"], mode=interface_data["802.1Q_mode"], untagged_vlan__name=interface_data["untagged_vlan"]["name"] if interface_data["untagged_vlan"] else None, ) @@ -299,28 +303,29 @@ def load_interface(self, hostname, interface_name, interface_data): def load_ip_addresses(self): """Load IP addresses into the DiffSync store.""" for hostname, device_data in self.job.command_getter_result.items(): - for interface in device_data["interfaces"]: + for interface in json.loads(device_data["interfaces"]): for interface_name, interface_data in interface.items(): for ip_address in interface_data["ip_addresses"]: - if self.job.debug: - self.job.logger.debug(f"Loading {ip_address} from {interface_name} on {hostname}") - network_ip_address = self.ip_address( - diffsync=self, - host=ip_address["host"], - mask_length=ip_address["mask_length"], - type="host", - ip_version=4, - status__name=self.job.ip_address_status.name, - ) - try: - self.add(network_ip_address) + if ip_address["ip_address"]: # the ip_address and mask_length may be empty, skip these if self.job.debug: - self.job.logger.debug(f"{network_ip_address} loaded.") - 
except diffsync.exceptions.ObjectAlreadyExists: - self.job.logger.warning( - f"{network_ip_address} is already loaded to the " - "DiffSync store. This is a duplicate IP Address." + self.job.logger.debug(f"Loading {ip_address} from {interface_name} on {hostname}") + network_ip_address = self.ip_address( + diffsync=self, + host=ip_address["ip_address"], + mask_length=int(ip_address["prefix_length"]), + type="host", + ip_version=4, + status__name=self.job.ip_address_status.name, ) + try: + self.add(network_ip_address) + if self.job.debug: + self.job.logger.debug(f"{network_ip_address} loaded.") + except diffsync.exceptions.ObjectAlreadyExists: + self.job.logger.warning( + f"{network_ip_address} is already loaded to the " + "DiffSync store. This is a duplicate IP Address." + ) def load_vlans(self): """Load vlans into the Diffsync store.""" @@ -329,7 +334,7 @@ def load_vlans(self): location_names[device.name] = device.location.name for hostname, device_data in self.job.command_getter_result.items(): - for interface in device_data["interfaces"]: + for interface in json.loads(device_data["interfaces"]): for _, interface_data in interface.items(): # add tagged vlans for tagged_vlan in interface_data["tagged_vlans"]: @@ -363,24 +368,25 @@ def load_vlans(self): def load_ip_address_to_interfaces(self): """Load ip address interface assignments into the Diffsync store.""" for hostname, device_data in self.job.command_getter_result.items(): - for interface in device_data["interfaces"]: + for interface in json.loads(device_data["interfaces"]): for interface_name, interface_data in interface.items(): for ip_address in interface_data["ip_addresses"]: - network_ip_address_to_interface = self.ipaddress_to_interface( - diffsync=self, - interface__device__name=hostname, - interface__name=interface_name, - ip_address__host=ip_address["host"], - ip_address__mask_length=ip_address["mask_length"], - ) - self.add(network_ip_address_to_interface) - if self.job.debug: - 
self.job.logger.debug(f"IP Address to interface {network_ip_address_to_interface} loaded.") + if ip_address["ip_address"]: # the ip_address and mask_length may be empty, skip these + network_ip_address_to_interface = self.ipaddress_to_interface( + diffsync=self, + interface__device__name=hostname, + interface__name=interface_name, + ip_address__host=ip_address["ip_address"], + ip_address__mask_length=int(ip_address["prefix_length"]) if ip_address["prefix_length"] else None, + ) + self.add(network_ip_address_to_interface) + if self.job.debug: + self.job.logger.debug(f"IP Address to interface {network_ip_address_to_interface} loaded.") def load_tagged_vlans_to_interface(self): """Load tagged vlan to interface assignments into the Diffsync store.""" for hostname, device_data in self.job.command_getter_result.items(): - for interface in device_data["interfaces"]: + for interface in json.loads(device_data["interfaces"]): for interface_name, interface_data in interface.items(): network_tagged_vlans_to_interface = self.tagged_vlans_to_interface( diffsync=self, @@ -395,7 +401,7 @@ def load_tagged_vlans_to_interface(self): def load_lag_to_interface(self): """Load lag interface assignments into the Diffsync store.""" for hostname, device_data in self.job.command_getter_result.items(): - for interface in device_data["interfaces"]: + for interface in json.loads(device_data["interfaces"]): for interface_name, interface_data in interface.items(): network_lag_to_interface = self.lag_to_interface( diffsync=self, diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py index 25890f26..26f03b17 100755 --- a/nautobot_device_onboarding/jobs.py +++ b/nautobot_device_onboarding/jobs.py @@ -489,7 +489,7 @@ def run( "ip_address_status": ip_address_status, "default_prefix_status": default_prefix_status, "location": location, - "devices": devices, + "devices": self.filtered_devices, "device_role": device_role, "tag": tag, "sync_vlans": sync_vlans, From 
5446c348189f327f02d85ab4df9e0af0a9f758cd Mon Sep 17 00:00:00 2001 From: David Cates Date: Fri, 23 Feb 2024 16:05:23 -0700 Subject: [PATCH 118/225] remove tag filter option --- nautobot_device_onboarding/jobs.py | 22 +++------------------- 1 file changed, 3 insertions(+), 19 deletions(-) diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py index 831e7529..f28c0040 100755 --- a/nautobot_device_onboarding/jobs.py +++ b/nautobot_device_onboarding/jobs.py @@ -12,7 +12,7 @@ from nautobot.core.celery import register_jobs from nautobot.dcim.models import Device, DeviceType, Location, Platform from nautobot.extras.choices import SecretsGroupAccessTypeChoices, SecretsGroupSecretTypeChoices -from nautobot.extras.models import Role, SecretsGroup, SecretsGroupAssociation, Status, Tag +from nautobot.extras.models import Role, SecretsGroup, SecretsGroupAssociation, Status from nautobot.ipam.models import Namespace from nautobot_plugin_nornir.constants import NORNIR_SETTINGS from nautobot_plugin_nornir.plugins.inventory.nautobot_orm import NautobotORMInventory @@ -583,12 +583,6 @@ class Meta: required=False, description="Only update devices with the selected role.", ) - tag = ObjectVar( - model=Tag, - query_params={"content_types": "dcim.device"}, - required=False, - description="Only update devices with the selected tag.", - ) def load_source_adapter(self): """Load onboarding network adapter.""" @@ -613,7 +607,6 @@ def run( devices, device_role, sync_vlans, - tag, *args, **kwargs, ): @@ -628,7 +621,6 @@ def run( self.location = location self.devices = devices self.device_role = device_role - self.tag = tag self.sync_vlans = sync_vlans # Filter devices based on form input @@ -639,8 +631,7 @@ def run( device_filter["location"] = location if self.device_role: device_filter["role"] = device_role - if self.tag: - device_filter["tags"] = tag + self.filtered_devices = Device.objects.filter(**device_filter) self.job_result.task_kwargs = { @@ -648,9 +639,8 
@@ def run( "ip_address_status": ip_address_status, "default_prefix_status": default_prefix_status, "location": location, - "devices": devices, + "devices": self.filtered_devices, "device_role": device_role, - "tag": tag, "sync_vlans": sync_vlans, } @@ -758,12 +748,6 @@ class CommandGetterNetworkImporter(Job): required=False, description="Only update devices with the selected role.", ) - tag = ObjectVar( - model=Tag, - query_params={"content_types": "dcim.device"}, - required=False, - description="Only update devices with the selected tag.", - ) port = IntegerVar(default=22) timeout = IntegerVar(default=30) From cc3d373ddac57cd7086e031f4d465a739a434d94 Mon Sep 17 00:00:00 2001 From: David Cates Date: Fri, 23 Feb 2024 16:07:16 -0700 Subject: [PATCH 119/225] remove tag filter --- nautobot_device_onboarding/jobs.py | 20 ++------------------ 1 file changed, 2 insertions(+), 18 deletions(-) diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py index 26f03b17..2bc8f1eb 100755 --- a/nautobot_device_onboarding/jobs.py +++ b/nautobot_device_onboarding/jobs.py @@ -9,7 +9,7 @@ from nautobot.core.celery import register_jobs from nautobot.dcim.models import Device, DeviceType, Location, Platform from nautobot.extras.choices import SecretsGroupAccessTypeChoices, SecretsGroupSecretTypeChoices -from nautobot.extras.models import Role, SecretsGroup, SecretsGroupAssociation, Status, Tag +from nautobot.extras.models import Role, SecretsGroup, SecretsGroupAssociation, Status from nautobot.ipam.models import Namespace from nautobot_plugin_nornir.constants import NORNIR_SETTINGS from nautobot_plugin_nornir.plugins.inventory.nautobot_orm import NautobotORMInventory @@ -424,12 +424,6 @@ class Meta: required=False, description="Only update devices with the selected role.", ) - tag = ObjectVar( - model=Tag, - query_params={"content_types": "dcim.device"}, - required=False, - description="Only update devices with the selected tag.", - ) def 
load_source_adapter(self): """Load onboarding network adapter.""" @@ -454,7 +448,6 @@ def run( devices, device_role, sync_vlans, - tag, *args, **kwargs, ): @@ -469,7 +462,6 @@ def run( self.location = location self.devices = devices self.device_role = device_role - self.tag = tag self.sync_vlans = sync_vlans # Filter devices based on form input @@ -480,8 +472,7 @@ def run( device_filter["location"] = location if self.device_role: device_filter["role"] = device_role - if self.tag: - device_filter["tags"] = tag + self.filtered_devices = Device.objects.filter(**device_filter) self.job_result.task_kwargs = { @@ -491,7 +482,6 @@ def run( "location": location, "devices": self.filtered_devices, "device_role": device_role, - "tag": tag, "sync_vlans": sync_vlans, } @@ -572,12 +562,6 @@ class CommandGetterNetworkImporter(Job): required=False, description="Only update devices with the selected role.", ) - tag = ObjectVar( - model=Tag, - query_params={"content_types": "dcim.device"}, - required=False, - description="Only update devices with the selected tag.", - ) port = IntegerVar(default=22) timeout = IntegerVar(default=30) From 2ffd7199df7868cb48a4b866c7b5db622798d608 Mon Sep 17 00:00:00 2001 From: Jeff Kala Date: Fri, 23 Feb 2024 17:47:40 -0600 Subject: [PATCH 120/225] decouple nornir play from job to remove enqueue job option --- .../adapters/network_importer_adapters.py | 31 ++------ .../diffsync/adapters/onboarding_adapters.py | 26 ++----- nautobot_device_onboarding/jobs.py | 73 ++---------------- .../nornir_plays/command_getter.py | 77 ++++++++++++++++++- .../utils/inventory_creator.py | 7 +- 5 files changed, 96 insertions(+), 118 deletions(-) diff --git a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py index 2d475f19..c5c4a971 100644 --- a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py +++ 
b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py @@ -1,19 +1,15 @@ """DiffSync adapters.""" -import time - import diffsync from diffsync.enum import DiffSyncModelFlags from django.core.exceptions import ValidationError -from nautobot.apps.choices import JobResultStatusChoices from nautobot.dcim.models import Interface -from nautobot.extras.models import Job, JobResult -from nautobot.ipam.models import VLAN, IPAddress -from nautobot_ssot.contrib import NautobotAdapter -from netaddr import EUI, mac_unix_expanded - +from nautobot.ipam.models import IPAddress, VLAN from nautobot_device_onboarding.diffsync.models import network_importer_models +from nautobot_device_onboarding.nornir_plays.command_getter import command_getter_ni from nautobot_device_onboarding.utils import diffsync_utils +from nautobot_ssot.contrib import NautobotAdapter +from netaddr import EUI, mac_unix_expanded class FilteredNautobotAdapter(NautobotAdapter): @@ -231,26 +227,15 @@ def _handle_failed_devices(self, device_data): def execute_command_getter(self): """Start the CommandGetterDO job to query devices for data.""" - command_getter_job = Job.objects.get(name="Command Getter for Network Importer") - job_kwargs = self.job.prepare_job_kwargs(self.job.job_result.task_kwargs) - kwargs = self.job.serialize_data(job_kwargs) - result = JobResult.enqueue_job( - job_model=command_getter_job, user=self.job.user, celery_kwargs=self.job.job_result.celery_kwargs, **kwargs - ) - while True: - if result.status not in JobResultStatusChoices.READY_STATES: - time.sleep(5) - result.refresh_from_db() - else: - break + result = command_getter_ni(self.job.job_result, self.job.logger.getEffectiveLevel(), self.job.job_result.task_kwargs) if self.job.debug: - self.job.logger.debug(f"Command Getter Job Result: {result.result}") + self.job.logger.debug(f"Command Getter Job Result: {result}") # verify data returned is a dict - data_type_check = diffsync_utils.check_data_type(result.result) + 
data_type_check = diffsync_utils.check_data_type(result) if self.job.debug: self.job.logger.debug(f"CommandGetter data type check resut: {data_type_check}") if data_type_check: - self._handle_failed_devices(device_data=result.result) + self._handle_failed_devices(device_data=result) else: self.job.logger.error( "Data returned from CommandGetter is not the correct type. " diff --git a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py index e72d3f98..ba589d9f 100644 --- a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py @@ -1,19 +1,16 @@ """DiffSync adapters.""" -import time from collections import defaultdict -from typing import Dict, FrozenSet, DefaultDict, Hashable, Tuple, Type +from typing import DefaultDict, Dict, FrozenSet, Hashable, Tuple, Type import diffsync import netaddr from django.contrib.contenttypes.models import ContentType from django.core.exceptions import ValidationError from django.db.models import Model -from nautobot.apps.choices import JobResultStatusChoices from nautobot.dcim.models import Device, DeviceType, Manufacturer, Platform -from nautobot.extras.models import Job, JobResult - from nautobot_device_onboarding.diffsync.models import onboarding_models +from nautobot_device_onboarding.nornir_plays.command_getter import command_getter_do from nautobot_device_onboarding.utils import diffsync_utils ParameterSet = FrozenSet[Tuple[str, Hashable]] @@ -202,25 +199,14 @@ def execute_command_getter(self): ) raise Exception("Platform.network_driver missing") # pylint: disable=broad-exception-raised - command_getter_job = Job.objects.get(name="Command Getter for Device Onboarding") - job_kwargs = self.job.prepare_job_kwargs(self.job.job_result.task_kwargs) - kwargs = self.job.serialize_data(job_kwargs) - result = JobResult.enqueue_job( - job_model=command_getter_job, 
user=self.job.user, celery_kwargs=self.job.job_result.celery_kwargs, **kwargs - ) - while True: - if result.status not in JobResultStatusChoices.READY_STATES: - time.sleep(5) - result.refresh_from_db() - else: - break + result = command_getter_do(self.job.job_result, self.job.logger.getEffectiveLevel(), self.job.job_result.task_kwargs) if self.job.debug: - self.job.logger.debug(f"Command Getter Job Result: {result.result}") - data_type_check = diffsync_utils.check_data_type(result.result) + self.job.logger.debug(f"Command Getter Job Result: {result}") + data_type_check = diffsync_utils.check_data_type(result) if self.job.debug: self.job.logger.debug(f"CommandGetter data type check resut: {data_type_check}") if data_type_check: - self._handle_failed_devices(device_data=result.result) + self._handle_failed_devices(device_data=result) else: self.job.logger.error( "Data returned from CommandGetter is not the correct type. " diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py index 25890f26..f8468719 100755 --- a/nautobot_device_onboarding/jobs.py +++ b/nautobot_device_onboarding/jobs.py @@ -11,12 +11,6 @@ from nautobot.extras.choices import SecretsGroupAccessTypeChoices, SecretsGroupSecretTypeChoices from nautobot.extras.models import Role, SecretsGroup, SecretsGroupAssociation, Status, Tag from nautobot.ipam.models import Namespace -from nautobot_plugin_nornir.constants import NORNIR_SETTINGS -from nautobot_plugin_nornir.plugins.inventory.nautobot_orm import NautobotORMInventory -from nautobot_ssot.jobs.base import DataSource -from nornir import InitNornir -from nornir.core.plugins.inventory import InventoryPluginRegister, TransformFunctionRegister - from nautobot_device_onboarding.diffsync.adapters.network_importer_adapters import ( NetworkImporterNautobotAdapter, NetworkImporterNetworkAdapter, @@ -28,17 +22,8 @@ from nautobot_device_onboarding.exceptions import OnboardException from nautobot_device_onboarding.helpers import 
onboarding_task_fqdn_to_ip from nautobot_device_onboarding.netdev_keeper import NetdevKeeper -from nautobot_device_onboarding.nornir_plays.command_getter import netmiko_send_commands -from nautobot_device_onboarding.nornir_plays.empty_inventory import EmptyInventory -from nautobot_device_onboarding.nornir_plays.logger import NornirLogger -from nautobot_device_onboarding.nornir_plays.processor import ProcessorDO -from nautobot_device_onboarding.utils.helper import add_platform_parsing_info, get_job_filter -from nautobot_device_onboarding.utils.inventory_creator import _set_inventory - -InventoryPluginRegister.register("nautobot-inventory", NautobotORMInventory) -InventoryPluginRegister.register("empty-inventory", EmptyInventory) -TransformFunctionRegister.register("transform_to_add_command_parser_info", add_platform_parsing_info) - +from nautobot_device_onboarding.nornir_plays.command_getter import command_getter_do, command_getter_ni +from nautobot_ssot.jobs.base import DataSource PLUGIN_SETTINGS = settings.PLUGINS_CONFIG["nautobot_device_onboarding"] @@ -517,34 +502,8 @@ class Meta: platform = ObjectVar(model=Platform, required=False) def run(self, *args, **kwargs): - """Process onboarding task from ssot-ni job.""" - self.ip_addresses = kwargs["ip_addresses"].replace(" ", "").split(",") - self.port = kwargs["port"] - self.timeout = kwargs["timeout"] - self.secrets_group = kwargs["secrets_group"] - self.platform = kwargs["platform"] - - # Initiate Nornir instance with empty inventory - try: - logger = NornirLogger(self.job_result, log_level=0) - compiled_results = {} - with InitNornir( - runner=NORNIR_SETTINGS.get("runner"), - logging={"enabled": False}, - inventory={ - "plugin": "empty-inventory", - }, - ) as nornir_obj: - nr_with_processors = nornir_obj.with_processors([ProcessorDO(logger, compiled_results)]) - for entered_ip in self.ip_addresses: - single_host_inventory_constructed = _set_inventory( - entered_ip, self.platform, self.port, self.secrets_group - ) 
- nr_with_processors.inventory.hosts.update(single_host_inventory_constructed) - nr_with_processors.run(task=netmiko_send_commands, command_getter_job="device_onboarding") - except Exception as err: # pylint: disable=broad-exception-caught - self.logger.error("Error: %s", err) - return err + """Run command getter.""" + compiled_results = command_getter_do(self.job_result, self.logger.getEffectiveLevel(), kwargs) return compiled_results @@ -590,28 +549,8 @@ class Meta: hidden = False def run(self, *args, **kwargs): - """Process onboarding task from ssot-ni job.""" - try: - logger = NornirLogger(self.job_result, log_level=0) - compiled_results = {} - qs = get_job_filter(kwargs) - with InitNornir( - runner=NORNIR_SETTINGS.get("runner"), - logging={"enabled": False}, - inventory={ - "plugin": "nautobot-inventory", - "options": { - "credentials_class": NORNIR_SETTINGS.get("credentials"), - "queryset": qs, - }, - "transform_function": "transform_to_add_command_parser_info", - }, - ) as nornir_obj: - nr_with_processors = nornir_obj.with_processors([ProcessorDO(logger, compiled_results)]) - nr_with_processors.run(task=netmiko_send_commands, command_getter_job="network_importer") - except Exception as err: # pylint: disable=broad-exception-caught - self.logger.info("Error: %s", err) - return err + """Run command getter.""" + compiled_results = command_getter_ni(self.job_result, self.logger.getEffectiveLevel(), kwargs) return compiled_results diff --git a/nautobot_device_onboarding/nornir_plays/command_getter.py b/nautobot_device_onboarding/nornir_plays/command_getter.py index c2d6c2cb..bee8d9c7 100755 --- a/nautobot_device_onboarding/nornir_plays/command_getter.py +++ b/nautobot_device_onboarding/nornir_plays/command_getter.py @@ -1,9 +1,21 @@ -"""Command Getter.""" - +"""Nornir job for backing up actual config.""" +# pylint: disable=relative-beyond-top-level +from nautobot_device_onboarding.constants import NETMIKO_TO_NAPALM_STATIC +from 
nautobot_device_onboarding.nornir_plays.empty_inventory import EmptyInventory +from nautobot_device_onboarding.nornir_plays.logger import NornirLogger +from nautobot_device_onboarding.nornir_plays.processor import ProcessorDO +from nautobot_device_onboarding.utils.helper import add_platform_parsing_info, get_job_filter +from nautobot_device_onboarding.utils.inventory_creator import _set_inventory +from nautobot_plugin_nornir.constants import NORNIR_SETTINGS +from nautobot_plugin_nornir.plugins.inventory.nautobot_orm import NautobotORMInventory +from nornir import InitNornir +from nornir.core.plugins.inventory import InventoryPluginRegister, TransformFunctionRegister from nornir.core.task import Result, Task from nornir_netmiko.tasks import netmiko_send_command -from nautobot_device_onboarding.constants import NETMIKO_TO_NAPALM_STATIC +InventoryPluginRegister.register("nautobot-inventory", NautobotORMInventory) +InventoryPluginRegister.register("empty-inventory", EmptyInventory) +TransformFunctionRegister.register("transform_to_add_command_parser_info", add_platform_parsing_info) def _get_commands_to_run(yaml_parsed_info, command_getter_job): @@ -32,3 +44,62 @@ def netmiko_send_commands(task: Task, command_getter_job: str): use_textfsm=command_use_textfsm, read_timeout=60, ) + + +def command_getter_do(job_result, log_level, kwargs): + """Nornir play to run show commands.""" + logger = NornirLogger(job_result, log_level) + + ip_addresses = kwargs["ip_addresses"].replace(" ", "").split(",") + port = kwargs["port"] + timeout = kwargs["timeout"] + secrets_group = kwargs["secrets_group"] + platform = kwargs["platform"] + # Initiate Nornir instance with empty inventory + try: + logger = NornirLogger(job_result, log_level=0) + compiled_results = {} + with InitNornir( + runner=NORNIR_SETTINGS.get("runner"), + logging={"enabled": False}, + inventory={ + "plugin": "empty-inventory", + }, + ) as nornir_obj: + nr_with_processors = nornir_obj.with_processors([ProcessorDO(logger, 
compiled_results)]) + for entered_ip in ip_addresses: + single_host_inventory_constructed = _set_inventory( + entered_ip, platform, port, secrets_group + ) + nr_with_processors.inventory.hosts.update(single_host_inventory_constructed) + nr_with_processors.run(task=netmiko_send_commands, command_getter_job="device_onboarding") + except Exception as err: # pylint: disable=broad-exception-caught + logger.error("Error: %s", err) + return err + return compiled_results + + +def command_getter_ni(job_result, log_level, kwargs): + """Process onboarding task from ssot-ni job.""" + logger = NornirLogger(job_result, log_level) + try: + compiled_results = {} + qs = get_job_filter(kwargs) + with InitNornir( + runner=NORNIR_SETTINGS.get("runner"), + logging={"enabled": False}, + inventory={ + "plugin": "nautobot-inventory", + "options": { + "credentials_class": NORNIR_SETTINGS.get("credentials"), + "queryset": qs, + }, + "transform_function": "transform_to_add_command_parser_info", + }, + ) as nornir_obj: + nr_with_processors = nornir_obj.with_processors([ProcessorDO(logger, compiled_results)]) + nr_with_processors.run(task=netmiko_send_commands, command_getter_job="network_importer") + except Exception as err: # pylint: disable=broad-exception-caught + logger.info("Error: %s", err) + return err + return compiled_results diff --git a/nautobot_device_onboarding/utils/inventory_creator.py b/nautobot_device_onboarding/utils/inventory_creator.py index e297a48d..ab677109 100755 --- a/nautobot_device_onboarding/utils/inventory_creator.py +++ b/nautobot_device_onboarding/utils/inventory_creator.py @@ -2,11 +2,10 @@ from django.conf import settings from nautobot.extras.choices import SecretsGroupAccessTypeChoices, SecretsGroupSecretTypeChoices -from netmiko import SSHDetect -from nornir.core.inventory import ConnectionOptions, Host - from nautobot_device_onboarding.exceptions import OnboardException from nautobot_device_onboarding.utils.helper import _get_platform_parsing_info +from 
netmiko import SSHDetect +from nornir.core.inventory import ConnectionOptions, Host def _parse_credentials(credentials): @@ -73,8 +72,6 @@ def _set_inventory(host_ip, platform, port, secrets_group): parsing_info = _get_platform_parsing_info(platform) else: parsing_info = {} - print(parsing_info) - print(type(parsing_info)) host = Host( name=host_ip, From a16571beacc8baecb6622d3f3aa112f4aa39afcf Mon Sep 17 00:00:00 2001 From: Jeff Kala Date: Fri, 23 Feb 2024 18:01:16 -0600 Subject: [PATCH 121/225] clean up via black --- .../adapters/network_importer_adapters.py | 20 ++++++++++++------- .../diffsync/adapters/onboarding_adapters.py | 4 +++- .../nornir_plays/command_getter.py | 5 ++--- .../utils/jinja_filters.py | 6 +++--- 4 files changed, 21 insertions(+), 14 deletions(-) diff --git a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py index c0a5ad31..b71fceb5 100644 --- a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py @@ -65,7 +65,7 @@ def load_ip_addresses(self): for ip_address in interface_data["ip_addresses"]: if ip_address: ip_address_hosts.add(ip_address["ip_address"]) - ip_address_hosts.remove('') # do not attempt to filter ip addresses with empty strings + ip_address_hosts.remove("") # do not attempt to filter ip addresses with empty strings for ip_address in IPAddress.objects.filter( host__in=ip_address_hosts, parent__namespace__name=self.job.namespace.name, @@ -229,7 +229,9 @@ def _handle_failed_devices(self, device_data): def execute_command_getter(self): """Start the CommandGetterDO job to query devices for data.""" - result = command_getter_ni(self.job.job_result, self.job.logger.getEffectiveLevel(), self.job.job_result.task_kwargs) + result = command_getter_ni( + self.job.job_result, self.job.logger.getEffectiveLevel(), self.job.job_result.task_kwargs + 
) if self.job.debug: self.job.logger.debug(f"Command Getter Job Result: {result}") # verify data returned is a dict @@ -275,7 +277,7 @@ def load_interface(self, hostname, interface_name, interface_data): status__name=self.job.interface_status.name, type=interface_data["type"], mac_address=self._process_mac_address(interface_data["mac_address"]), - mtu=interface_data["mtu"] if interface_data['mtu'] else 1500, + mtu=interface_data["mtu"] if interface_data["mtu"] else 1500, description=interface_data["description"], enabled=interface_data["link_status"], mode=interface_data["802.1Q_mode"], @@ -292,7 +294,7 @@ def load_ip_addresses(self): for interface in json.loads(device_data["interfaces"]): for interface_name, interface_data in interface.items(): for ip_address in interface_data["ip_addresses"]: - if ip_address["ip_address"]: # the ip_address and mask_length may be empty, skip these + if ip_address["ip_address"]: # the ip_address and mask_length may be empty, skip these if self.job.debug: self.job.logger.debug(f"Loading {ip_address} from {interface_name} on {hostname}") network_ip_address = self.ip_address( @@ -357,17 +359,21 @@ def load_ip_address_to_interfaces(self): for interface in json.loads(device_data["interfaces"]): for interface_name, interface_data in interface.items(): for ip_address in interface_data["ip_addresses"]: - if ip_address["ip_address"]: # the ip_address and mask_length may be empty, skip these + if ip_address["ip_address"]: # the ip_address and mask_length may be empty, skip these network_ip_address_to_interface = self.ipaddress_to_interface( diffsync=self, interface__device__name=hostname, interface__name=interface_name, ip_address__host=ip_address["ip_address"], - ip_address__mask_length=int(ip_address["prefix_length"]) if ip_address["prefix_length"] else None, + ip_address__mask_length=( + int(ip_address["prefix_length"]) if ip_address["prefix_length"] else None + ), ) self.add(network_ip_address_to_interface) if self.job.debug: - 
self.job.logger.debug(f"IP Address to interface {network_ip_address_to_interface} loaded.") + self.job.logger.debug( + f"IP Address to interface {network_ip_address_to_interface} loaded." + ) def load_tagged_vlans_to_interface(self): """Load tagged vlan to interface assignments into the Diffsync store.""" diff --git a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py index ba589d9f..b2a684eb 100644 --- a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py @@ -199,7 +199,9 @@ def execute_command_getter(self): ) raise Exception("Platform.network_driver missing") # pylint: disable=broad-exception-raised - result = command_getter_do(self.job.job_result, self.job.logger.getEffectiveLevel(), self.job.job_result.task_kwargs) + result = command_getter_do( + self.job.job_result, self.job.logger.getEffectiveLevel(), self.job.job_result.task_kwargs + ) if self.job.debug: self.job.logger.debug(f"Command Getter Job Result: {result}") data_type_check = diffsync_utils.check_data_type(result) diff --git a/nautobot_device_onboarding/nornir_plays/command_getter.py b/nautobot_device_onboarding/nornir_plays/command_getter.py index bee8d9c7..f9330315 100755 --- a/nautobot_device_onboarding/nornir_plays/command_getter.py +++ b/nautobot_device_onboarding/nornir_plays/command_getter.py @@ -1,4 +1,5 @@ """Nornir job for backing up actual config.""" + # pylint: disable=relative-beyond-top-level from nautobot_device_onboarding.constants import NETMIKO_TO_NAPALM_STATIC from nautobot_device_onboarding.nornir_plays.empty_inventory import EmptyInventory @@ -68,9 +69,7 @@ def command_getter_do(job_result, log_level, kwargs): ) as nornir_obj: nr_with_processors = nornir_obj.with_processors([ProcessorDO(logger, compiled_results)]) for entered_ip in ip_addresses: - single_host_inventory_constructed = _set_inventory( - 
entered_ip, platform, port, secrets_group - ) + single_host_inventory_constructed = _set_inventory(entered_ip, platform, port, secrets_group) nr_with_processors.inventory.hosts.update(single_host_inventory_constructed) nr_with_processors.run(task=netmiko_send_commands, command_getter_job="device_onboarding") except Exception as err: # pylint: disable=broad-exception-caught diff --git a/nautobot_device_onboarding/utils/jinja_filters.py b/nautobot_device_onboarding/utils/jinja_filters.py index e8ac5cb0..d9887424 100755 --- a/nautobot_device_onboarding/utils/jinja_filters.py +++ b/nautobot_device_onboarding/utils/jinja_filters.py @@ -14,8 +14,8 @@ def fix_interfaces(interfaces): int_values["ip_addresses"].append( {"ip_address": int_values.get("ip_address", ""), "prefix_length": int_values.get("prefix_length", "")} ) - if 'up' in int_values['link_status']: - int_values['link_status'] = True + if "up" in int_values["link_status"]: + int_values["link_status"] = True else: - int_values['link_status'] = False + int_values["link_status"] = False return interfaces From 261eba10b1ac1a0127ed89e3059c6f82b76fbbba Mon Sep 17 00:00:00 2001 From: David Cates Date: Fri, 23 Feb 2024 17:05:25 -0700 Subject: [PATCH 122/225] update csv import logging --- nautobot_device_onboarding/jobs.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py index f28c0040..57521a79 100755 --- a/nautobot_device_onboarding/jobs.py +++ b/nautobot_device_onboarding/jobs.py @@ -329,12 +329,13 @@ def _process_csv_data(self, csv_file): row_count = 1 for row in csv_reader: try: - query = f"device_role: {row.get('location_name')}" + query = f"location_name: {row.get('location_name')}, location_parent_name: {row.get('location_parent_name')}" if row.get("location_parent_name"): location = Location.objects.get( name=row["location_name"].strip(), parent__name=row["location_parent_name"].strip() ) else: + query = query = 
f"location_name: {row.get('location_name')}" location = Location.objects.get(name=row["location_name"].strip(), parent=None) query = f"device_role: {row.get('device_role_name')}" device_role = Role.objects.get( From b2a3d905d165c0337866a6198ddd2121a7225bab Mon Sep 17 00:00:00 2001 From: David Cates Date: Mon, 26 Feb 2024 12:02:40 -0700 Subject: [PATCH 123/225] fix bug in _process_csv_data --- nautobot_device_onboarding/jobs.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py index 57521a79..ad850b2e 100755 --- a/nautobot_device_onboarding/jobs.py +++ b/nautobot_device_onboarding/jobs.py @@ -329,7 +329,7 @@ def _process_csv_data(self, csv_file): row_count = 1 for row in csv_reader: try: - query = f"location_name: {row.get('location_name')}, location_parent_name: {row.get('location_parent_name')}" + query = f"location_name: {row.get('location_name')}, location_parent_name: {row.get('location_parent_name')}" if row.get("location_parent_name"): location = Location.objects.get( name=row["location_name"].strip(), parent__name=row["location_parent_name"].strip() From 57798ad71e138d05e38f7278683782bf96ca03c3 Mon Sep 17 00:00:00 2001 From: David Cates Date: Mon, 26 Feb 2024 12:49:07 -0700 Subject: [PATCH 124/225] remove command getter job --- .../adapters/network_importer_adapters.py | 22 +-- .../diffsync/adapters/onboarding_adapters.py | 27 +--- .../diffsync/models/onboarding_models.py | 2 +- nautobot_device_onboarding/jobs.py | 140 ++++-------------- .../nornir_plays/command_getter.py | 99 ++++++++++++- .../utils/diffsync_utils.py | 2 +- .../utils/inventory_creator.py | 4 +- 7 files changed, 143 insertions(+), 153 deletions(-) diff --git a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py index 2d475f19..2fdfd831 100644 --- 
a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py @@ -5,14 +5,13 @@ import diffsync from diffsync.enum import DiffSyncModelFlags from django.core.exceptions import ValidationError -from nautobot.apps.choices import JobResultStatusChoices from nautobot.dcim.models import Interface -from nautobot.extras.models import Job, JobResult from nautobot.ipam.models import VLAN, IPAddress from nautobot_ssot.contrib import NautobotAdapter from netaddr import EUI, mac_unix_expanded from nautobot_device_onboarding.diffsync.models import network_importer_models +from nautobot_device_onboarding.nornir_plays.command_getter import command_getter_ni from nautobot_device_onboarding.utils import diffsync_utils @@ -231,26 +230,17 @@ def _handle_failed_devices(self, device_data): def execute_command_getter(self): """Start the CommandGetterDO job to query devices for data.""" - command_getter_job = Job.objects.get(name="Command Getter for Network Importer") - job_kwargs = self.job.prepare_job_kwargs(self.job.job_result.task_kwargs) - kwargs = self.job.serialize_data(job_kwargs) - result = JobResult.enqueue_job( - job_model=command_getter_job, user=self.job.user, celery_kwargs=self.job.job_result.celery_kwargs, **kwargs + result = command_getter_ni( + self.job.job_result, self.job.logger.getEffectiveLevel(), self.job.job_result.task_kwargs ) - while True: - if result.status not in JobResultStatusChoices.READY_STATES: - time.sleep(5) - result.refresh_from_db() - else: - break if self.job.debug: - self.job.logger.debug(f"Command Getter Job Result: {result.result}") + self.job.logger.debug(f"Command Getter Job Result: {result}") # verify data returned is a dict - data_type_check = diffsync_utils.check_data_type(result.result) + data_type_check = diffsync_utils.check_data_type(result) if self.job.debug: self.job.logger.debug(f"CommandGetter data type check resut: {data_type_check}") if 
data_type_check: - self._handle_failed_devices(device_data=result.result) + self._handle_failed_devices(device_data=result) else: self.job.logger.error( "Data returned from CommandGetter is not the correct type. " diff --git a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py index 2563af56..84a29424 100644 --- a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py @@ -2,18 +2,17 @@ import time from collections import defaultdict -from typing import Dict, FrozenSet, DefaultDict, Hashable, Tuple, Type +from typing import DefaultDict, Dict, FrozenSet, Hashable, Tuple, Type import diffsync import netaddr from django.contrib.contenttypes.models import ContentType from django.core.exceptions import ValidationError from django.db.models import Model -from nautobot.apps.choices import JobResultStatusChoices from nautobot.dcim.models import Device, DeviceType, Manufacturer, Platform -from nautobot.extras.models import Job, JobResult from nautobot_device_onboarding.diffsync.models import onboarding_models +from nautobot_device_onboarding.nornir_plays.command_getter import command_getter_do from nautobot_device_onboarding.utils import diffsync_utils ParameterSet = FrozenSet[Tuple[str, Hashable]] @@ -203,28 +202,16 @@ def execute_command_getter(self): ) raise Exception("Platform.network_driver missing") # pylint: disable=broad-exception-raised - command_getter_job = Job.objects.get(name="Command Getter for Device Onboarding") - if self.job.processed_csv_data: - kwargs = self.job.job_result.task_kwargs - else: - job_kwargs = self.job.prepare_job_kwargs(self.job.job_result.task_kwargs) - kwargs = self.job.serialize_data(job_kwargs) - result = JobResult.enqueue_job( - job_model=command_getter_job, user=self.job.user, celery_kwargs=self.job.job_result.celery_kwargs, **kwargs + result = command_getter_do( + 
self.job.job_result, self.job.logger.getEffectiveLevel(), self.job.job_result.task_kwargs ) - while True: - if result.status not in JobResultStatusChoices.READY_STATES: - time.sleep(5) - result.refresh_from_db() - else: - break if self.job.debug: - self.job.logger.debug(f"Command Getter Job Result: {result.result}") - data_type_check = diffsync_utils.check_data_type(result.result) + self.job.logger.debug(f"Command Getter Job Result: {result}") + data_type_check = diffsync_utils.check_data_type(result) if self.job.debug: self.job.logger.debug(f"CommandGetter data type check resut: {data_type_check}") if data_type_check: - self._handle_failed_devices(device_data=result.result) + self._handle_failed_devices(device_data=result) else: self.job.logger.error( "Data returned from CommandGetter is not the correct type. " diff --git a/nautobot_device_onboarding/diffsync/models/onboarding_models.py b/nautobot_device_onboarding/diffsync/models/onboarding_models.py index c7b7f6d6..37c3796c 100644 --- a/nautobot_device_onboarding/diffsync/models/onboarding_models.py +++ b/nautobot_device_onboarding/diffsync/models/onboarding_models.py @@ -166,7 +166,7 @@ def create(cls, diffsync, ids, attrs): """Create a new nautobot device using data scraped from a device.""" if diffsync.job.debug: diffsync.job.logger.debug("Creating device {ids} with {attrs}") - + # Get or create Device, Interface and IP Address device = cls._get_or_create_device(diffsync, ids, attrs) if device: diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py index ad850b2e..e3bce380 100755 --- a/nautobot_device_onboarding/jobs.py +++ b/nautobot_device_onboarding/jobs.py @@ -31,7 +31,11 @@ from nautobot_device_onboarding.exceptions import OnboardException from nautobot_device_onboarding.helpers import onboarding_task_fqdn_to_ip from nautobot_device_onboarding.netdev_keeper import NetdevKeeper -from nautobot_device_onboarding.nornir_plays.command_getter import netmiko_send_commands +from 
nautobot_device_onboarding.nornir_plays.command_getter import ( + command_getter_do, + command_getter_ni, + netmiko_send_commands, +) from nautobot_device_onboarding.nornir_plays.empty_inventory import EmptyInventory from nautobot_device_onboarding.nornir_plays.logger import NornirLogger from nautobot_device_onboarding.nornir_plays.processor import ProcessorDO @@ -461,19 +465,21 @@ def run( else: # Verify that all requried form inputs have been provided required_inputs = { - "location": location, - "namespace": namespace, - "ip_addresses": ip_addresses, - "device_role": device_role, - "device_status": device_status, - "interface_status": interface_status, - "ip_address_status": ip_address_status, - "port": port, - "timeout": timeout, - "secrets_group": secrets_group + "location": location, + "namespace": namespace, + "ip_addresses": ip_addresses, + "device_role": device_role, + "device_status": device_status, + "interface_status": interface_status, + "ip_address_status": ip_address_status, + "port": port, + "timeout": timeout, + "secrets_group": secrets_group, } - - missing_required_inputs = [form_field for form_field, input_value in required_inputs.items() if not input_value] + + missing_required_inputs = [ + form_field for form_field, input_value in required_inputs.items() if not input_value + ] if not missing_required_inputs: pass else: @@ -652,7 +658,7 @@ class CommandGetterDO(Job): """Simple Job to Execute Show Command.""" class Meta: - """Meta object boilerplate for onboarding.""" + """CommandGetterDO Job Meta.""" name = "Command Getter for Device Onboarding" description = "Login to a device(s) and run commands." 
@@ -668,87 +674,19 @@ class Meta: platform = ObjectVar(model=Platform, required=False) def run(self, *args, **kwargs): - """Process onboarding task from ssot-ni job.""" - if kwargs["csv_file"]: - self.ip_addresses = [] - for ip_address in kwargs["csv_file"]: - self.ip_addresses.append(ip_address) - else: - self.ip_addresses = kwargs["ip_addresses"].replace(" ", "").split(",") - self.port = kwargs["port"] - self.timeout = kwargs["timeout"] - self.secrets_group = kwargs["secrets_group"] - self.platform = kwargs["platform"] - - # Initiate Nornir instance with empty inventory - try: - logger = NornirLogger(self.job_result, log_level=0) - compiled_results = {} - with InitNornir( - runner=NORNIR_SETTINGS.get("runner"), - logging={"enabled": False}, - inventory={ - "plugin": "empty-inventory", - }, - ) as nornir_obj: - nr_with_processors = nornir_obj.with_processors([ProcessorDO(logger, compiled_results)]) - for entered_ip in self.ip_addresses: - if kwargs["csv_file"]: - # get platform if one was provided via csv - platform = None - platform_id = kwargs["csv_file"][entered_ip]["platform"] - if platform_id: - platform = Platform.objects.get(id=platform_id) - - # get secrets group if one was provided via csv - secrets_group = None - secrets_group_id = kwargs["csv_file"][entered_ip]["secrets_group"] - if secrets_group_id: - secrets_group = SecretsGroup.objects.get(id=secrets_group_id) - - single_host_inventory_constructed = _set_inventory( - host_ip=entered_ip, - platform=platform, - port=kwargs["csv_file"][entered_ip]["port"], - secrets_group=secrets_group, - ) - else: - single_host_inventory_constructed = _set_inventory( - entered_ip, self.platform, self.port, self.secrets_group - ) - - nr_with_processors.inventory.hosts.update(single_host_inventory_constructed) - nr_with_processors.run(task=netmiko_send_commands, command_getter_job="device_onboarding") - except Exception as err: # pylint: disable=broad-exception-caught - self.logger.error("Error: %s", err) - return err + 
"""Run command getter.""" + compiled_results = command_getter_do(self.job_result, self.logger.getEffectiveLevel(), kwargs) return compiled_results class CommandGetterNetworkImporter(Job): """Simple Job to Execute Show Command.""" - debug = BooleanVar(description="Enable for more verbose logging.") - namespace = ObjectVar( - model=Namespace, required=True, description="The namespace for all IP addresses created or updated in the sync." - ) - devices = MultiObjectVar( - model=Device, - required=False, - description="Device(s) to update.", - ) - location = ObjectVar( - model=Location, - query_params={"content_type": "dcim.device"}, - required=False, - description="Only update devices at a specific location.", - ) - device_role = ObjectVar( - model=Role, - query_params={"content_types": "dcim.device"}, - required=False, - description="Only update devices with the selected role.", - ) + debug = BooleanVar() + namespace = ObjectVar(model=Namespace, required=True) + devices = MultiObjectVar(model=Device, required=False) + location = ObjectVar(model=Location, required=False) + device_role = ObjectVar(model=Role, required=False) port = IntegerVar(default=22) timeout = IntegerVar(default=30) @@ -761,28 +699,8 @@ class Meta: hidden = False def run(self, *args, **kwargs): - """Process onboarding task from ssot-ni job.""" - try: - logger = NornirLogger(self.job_result, log_level=0) - compiled_results = {} - qs = get_job_filter(kwargs) - with InitNornir( - runner=NORNIR_SETTINGS.get("runner"), - logging={"enabled": False}, - inventory={ - "plugin": "nautobot-inventory", - "options": { - "credentials_class": NORNIR_SETTINGS.get("credentials"), - "queryset": qs, - }, - "transform_function": "transform_to_add_command_parser_info", - }, - ) as nornir_obj: - nr_with_processors = nornir_obj.with_processors([ProcessorDO(logger, compiled_results)]) - nr_with_processors.run(task=netmiko_send_commands, command_getter_job="network_importer") - except Exception as err: # pylint: 
disable=broad-exception-caught - self.logger.info("Error: %s", err) - return err + """Run command getter.""" + compiled_results = command_getter_ni(self.job_result, self.logger.getEffectiveLevel(), kwargs) return compiled_results diff --git a/nautobot_device_onboarding/nornir_plays/command_getter.py b/nautobot_device_onboarding/nornir_plays/command_getter.py index c2d6c2cb..5e3f50f9 100755 --- a/nautobot_device_onboarding/nornir_plays/command_getter.py +++ b/nautobot_device_onboarding/nornir_plays/command_getter.py @@ -1,9 +1,25 @@ -"""Command Getter.""" +"""CommandGetter.""" +# pylint: disable=relative-beyond-top-level +from nautobot_plugin_nornir.constants import NORNIR_SETTINGS +from nautobot_plugin_nornir.plugins.inventory.nautobot_orm import NautobotORMInventory +from nornir import InitNornir +from nornir.core.plugins.inventory import InventoryPluginRegister, TransformFunctionRegister from nornir.core.task import Result, Task from nornir_netmiko.tasks import netmiko_send_command +from nautobot.extras.models import SecretsGroup +from nautobot.dcim.models import Platform from nautobot_device_onboarding.constants import NETMIKO_TO_NAPALM_STATIC +from nautobot_device_onboarding.nornir_plays.empty_inventory import EmptyInventory +from nautobot_device_onboarding.nornir_plays.logger import NornirLogger +from nautobot_device_onboarding.nornir_plays.processor import ProcessorDO +from nautobot_device_onboarding.utils.helper import add_platform_parsing_info, get_job_filter +from nautobot_device_onboarding.utils.inventory_creator import _set_inventory + +InventoryPluginRegister.register("nautobot-inventory", NautobotORMInventory) +InventoryPluginRegister.register("empty-inventory", EmptyInventory) +TransformFunctionRegister.register("transform_to_add_command_parser_info", add_platform_parsing_info) def _get_commands_to_run(yaml_parsed_info, command_getter_job): @@ -32,3 +48,84 @@ def netmiko_send_commands(task: Task, command_getter_job: str): 
use_textfsm=command_use_textfsm, read_timeout=60, ) + + +def command_getter_do(job_result, log_level, kwargs): + """Nornir play to run show commands.""" + logger = NornirLogger(job_result, log_level) + + if kwargs["csv_file"]: # ip_addreses will be keys in a dict + ip_addresses = [] + for ip_address in kwargs["csv_file"]: + ip_addresses.append(ip_address) + else: + ip_addresses = kwargs["ip_addresses"].replace(" ", "").split(",") + port = kwargs["port"] + timeout = kwargs["timeout"] + secrets_group = kwargs["secrets_group"] + platform = kwargs["platform"] + # Initiate Nornir instance with empty inventory + try: + logger = NornirLogger(job_result, log_level=0) + compiled_results = {} + with InitNornir( + runner=NORNIR_SETTINGS.get("runner"), + logging={"enabled": False}, + inventory={ + "plugin": "empty-inventory", + }, + ) as nornir_obj: + nr_with_processors = nornir_obj.with_processors([ProcessorDO(logger, compiled_results)]) + for entered_ip in ip_addresses: + if kwargs["csv_file"]: + # get platform if one was provided via csv + platform = None + platform_id = kwargs["csv_file"][entered_ip]["platform"] + if platform_id: + platform = Platform.objects.get(id=platform_id) + + # get secrets group if one was provided via csv + secrets_group = None + secrets_group_id = kwargs["csv_file"][entered_ip]["secrets_group"] + if secrets_group_id: + secrets_group = SecretsGroup.objects.get(id=secrets_group_id) + single_host_inventory_constructed = _set_inventory( + host_ip=entered_ip, + platform=platform, + port=kwargs["csv_file"][entered_ip]["port"], + secrets_group=secrets_group, + ) + else: + single_host_inventory_constructed = _set_inventory(entered_ip, platform, port, secrets_group) + nr_with_processors.inventory.hosts.update(single_host_inventory_constructed) + nr_with_processors.run(task=netmiko_send_commands, command_getter_job="device_onboarding") + except Exception as err: # pylint: disable=broad-exception-caught + logger.error("Error: %s", err) + return err + return 
compiled_results + + +def command_getter_ni(job_result, log_level, kwargs): + """Process onboarding task from ssot-ni job.""" + logger = NornirLogger(job_result, log_level) + try: + compiled_results = {} + qs = get_job_filter(kwargs) + with InitNornir( + runner=NORNIR_SETTINGS.get("runner"), + logging={"enabled": False}, + inventory={ + "plugin": "nautobot-inventory", + "options": { + "credentials_class": NORNIR_SETTINGS.get("credentials"), + "queryset": qs, + }, + "transform_function": "transform_to_add_command_parser_info", + }, + ) as nornir_obj: + nr_with_processors = nornir_obj.with_processors([ProcessorDO(logger, compiled_results)]) + nr_with_processors.run(task=netmiko_send_commands, command_getter_job="network_importer") + except Exception as err: # pylint: disable=broad-exception-caught + logger.info("Error: %s", err) + return err + return compiled_results diff --git a/nautobot_device_onboarding/utils/diffsync_utils.py b/nautobot_device_onboarding/utils/diffsync_utils.py index acb1de07..3c819bbd 100644 --- a/nautobot_device_onboarding/utils/diffsync_utils.py +++ b/nautobot_device_onboarding/utils/diffsync_utils.py @@ -104,4 +104,4 @@ def retrieve_submitted_value(job, ip_address, query_string): if job.processed_csv_data: return job.processed_csv_data[ip_address][query_string] else: - return getattr(job, query_string) + return getattr(job, query_string) diff --git a/nautobot_device_onboarding/utils/inventory_creator.py b/nautobot_device_onboarding/utils/inventory_creator.py index e297a48d..9b6e9ea8 100755 --- a/nautobot_device_onboarding/utils/inventory_creator.py +++ b/nautobot_device_onboarding/utils/inventory_creator.py @@ -57,7 +57,7 @@ def guess_netmiko_device_type(hostname, username, password, port): except Exception as err: print(err) - print(f"************************Guessed device type: {guessed_device_type}") + print(f"{hostname} - guessed platform: {guessed_device_type}") return guessed_device_type @@ -73,8 +73,6 @@ def _set_inventory(host_ip, 
platform, port, secrets_group): parsing_info = _get_platform_parsing_info(platform) else: parsing_info = {} - print(parsing_info) - print(type(parsing_info)) host = Host( name=host_ip, From 584f41af1157dbd18c96954db74bad29a516a5b6 Mon Sep 17 00:00:00 2001 From: David Cates Date: Mon, 26 Feb 2024 14:19:58 -0700 Subject: [PATCH 125/225] update NI --- .../diffsync/models/network_importer_models.py | 6 +----- nautobot_device_onboarding/jobs.py | 12 ++++++++---- .../nornir_plays/command_getter.py | 9 ++++++--- nautobot_device_onboarding/nornir_plays/processor.py | 6 ++++-- 4 files changed, 19 insertions(+), 14 deletions(-) diff --git a/nautobot_device_onboarding/diffsync/models/network_importer_models.py b/nautobot_device_onboarding/diffsync/models/network_importer_models.py index caca975f..023c2c7e 100644 --- a/nautobot_device_onboarding/diffsync/models/network_importer_models.py +++ b/nautobot_device_onboarding/diffsync/models/network_importer_models.py @@ -61,11 +61,7 @@ def _get_queryset(cls, diffsync: "DiffSync"): job.command_getter_result contains the result from the CommandGetter job. Only devices that actually responded with data should be considered for the sync. 
""" - if diffsync.job.filtered_devices: - return diffsync.job.devices_to_load - else: - diffsync.job.logger.error("No device filter options were provided, no devices will be synced.") - return cls._model.objects.none() + return diffsync.job.devices_to_load @classmethod def create(cls, diffsync, ids, attrs): diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py index b8861513..c61ad56c 100755 --- a/nautobot_device_onboarding/jobs.py +++ b/nautobot_device_onboarding/jobs.py @@ -412,8 +412,10 @@ class Meta: def load_source_adapter(self): """Load onboarding network adapter.""" - self.source_adapter = NetworkImporterNetworkAdapter(job=self, sync=self.sync) - self.source_adapter.load() + # do not load source data if the job form does not filter which devices to sync + if self.filtered_devices: + self.source_adapter = NetworkImporterNetworkAdapter(job=self, sync=self.sync) + self.source_adapter.load() def load_target_adapter(self): """Load onboarding Nautobot adapter.""" @@ -457,8 +459,10 @@ def run( device_filter["location"] = location if self.device_role: device_filter["role"] = device_role - - self.filtered_devices = Device.objects.filter(**device_filter) + if device_filter: # prevent all devices from being returned by an empty filter + self.filtered_devices = Device.objects.filter(**device_filter) + else: + self.logger.error("No device filter options were provided, no devices will be synced.") self.job_result.task_kwargs = { "debug": debug, diff --git a/nautobot_device_onboarding/nornir_plays/command_getter.py b/nautobot_device_onboarding/nornir_plays/command_getter.py index f9330315..96660e7e 100755 --- a/nautobot_device_onboarding/nornir_plays/command_getter.py +++ b/nautobot_device_onboarding/nornir_plays/command_getter.py @@ -67,7 +67,7 @@ def command_getter_do(job_result, log_level, kwargs): "plugin": "empty-inventory", }, ) as nornir_obj: - nr_with_processors = nornir_obj.with_processors([ProcessorDO(logger, compiled_results)]) + 
nr_with_processors = nornir_obj.with_processors([ProcessorDO(logger, compiled_results, kwargs)]) for entered_ip in ip_addresses: single_host_inventory_constructed = _set_inventory(entered_ip, platform, port, secrets_group) nr_with_processors.inventory.hosts.update(single_host_inventory_constructed) @@ -83,7 +83,10 @@ def command_getter_ni(job_result, log_level, kwargs): logger = NornirLogger(job_result, log_level) try: compiled_results = {} - qs = get_job_filter(kwargs) + # qs = get_job_filter(kwargs) + qs = kwargs["devices"] + if not qs: + return None with InitNornir( runner=NORNIR_SETTINGS.get("runner"), logging={"enabled": False}, @@ -96,7 +99,7 @@ def command_getter_ni(job_result, log_level, kwargs): "transform_function": "transform_to_add_command_parser_info", }, ) as nornir_obj: - nr_with_processors = nornir_obj.with_processors([ProcessorDO(logger, compiled_results)]) + nr_with_processors = nornir_obj.with_processors([ProcessorDO(logger, compiled_results, kwargs)]) nr_with_processors.run(task=netmiko_send_commands, command_getter_job="network_importer") except Exception as err: # pylint: disable=broad-exception-caught logger.info("Error: %s", err) diff --git a/nautobot_device_onboarding/nornir_plays/processor.py b/nautobot_device_onboarding/nornir_plays/processor.py index d6c96e68..8c6825b7 100755 --- a/nautobot_device_onboarding/nornir_plays/processor.py +++ b/nautobot_device_onboarding/nornir_plays/processor.py @@ -13,10 +13,11 @@ class ProcessorDO(BaseLoggingProcessor): """Processor class for Device Onboarding jobs.""" - def __init__(self, logger, command_outputs): + def __init__(self, logger, command_outputs, kwargs): """Set logging facility.""" self.logger = logger self.data: Dict = command_outputs + self.kwargs = kwargs def task_instance_started(self, task: Task, host: Host) -> None: """Processor for logging and data processing on task start.""" @@ -61,7 +62,8 @@ def task_instance_completed(self, task: Task, host: Host, result: MultiResult) - def 
subtask_instance_completed(self, task: Task, host: Host, result: MultiResult) -> None: """Processor for logging and data processing on subtask completed.""" self.logger.info(f"subtask_instance_completed Subtask completed {task.name}.", extra={"object": task.host}) - self.logger.info(f"subtask_instance_completed Subtask result {result.result}.", extra={"object": task.host}) + if self.kwargs["debug"]: + self.logger.info(f"subtask_instance_completed Subtask result {result.result}.", extra={"object": task.host}) self.data[host.name].update( { From 922e5598c880f7d9a41ad76795c52f6838225175 Mon Sep 17 00:00:00 2001 From: Susan Hooks Date: Mon, 26 Feb 2024 21:24:02 +0000 Subject: [PATCH 126/225] updates for description and enabled --- .../diffsync/adapters/network_importer_adapters.py | 2 +- .../diffsync/models/network_importer_models.py | 4 ++++ nautobot_device_onboarding/jobs.py | 2 +- nautobot_device_onboarding/utils/diffsync_utils.py | 2 +- 4 files changed, 7 insertions(+), 3 deletions(-) diff --git a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py index b71fceb5..32b33e57 100644 --- a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py @@ -225,7 +225,7 @@ def _handle_failed_devices(self, device_data): if failed_devices: self.job.logger.warning(f"Failed devices: {failed_devices}") self.job.command_getter_result = device_data - self.job.devices_to_load = diffsync_utils.generate_device_querset_from_command_getter_result(device_data) + self.job.devices_to_load = diffsync_utils.generate_device_queryset_from_command_getter_result(device_data) def execute_command_getter(self): """Start the CommandGetterDO job to query devices for data.""" diff --git a/nautobot_device_onboarding/diffsync/models/network_importer_models.py 
b/nautobot_device_onboarding/diffsync/models/network_importer_models.py index 023c2c7e..169afa03 100644 --- a/nautobot_device_onboarding/diffsync/models/network_importer_models.py +++ b/nautobot_device_onboarding/diffsync/models/network_importer_models.py @@ -101,6 +101,8 @@ class NetworkImporterInterface(FilteredNautobotModel): # "parent_interface__name", "mode", "untagged_vlan__name", + "enabled", + "description", ) device__name: str @@ -114,6 +116,8 @@ class NetworkImporterInterface(FilteredNautobotModel): lag__name: Optional[str] mode: Optional[str] untagged_vlan__name: Optional[str] + enabled: Optional[bool] + description: Optional[str] class NetworkImporterIPAddress(DiffSyncModel): diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py index c61ad56c..98199a05 100755 --- a/nautobot_device_onboarding/jobs.py +++ b/nautobot_device_onboarding/jobs.py @@ -351,7 +351,7 @@ def __init__(self): # from nautobot_device_onboarding.diffsync import mock_data # from nautobot_device_onboarding.utils import diffsync_utils # self.command_getter_result = mock_data.network_importer_mock_data - # self.devices_to_load = diffsync_utils.generate_device_querset_from_command_getter_result(mock_data.network_importer_mock_data) + # self.devices_to_load = diffsync_utils.generate_device_queryset_from_command_getter_result(mock_data.network_importer_mock_data) ################### REMOVE WHEN NOT TESTING ################################### ############ RESTORE THESE LINES WHEN NOT TESTING! 
############################ diff --git a/nautobot_device_onboarding/utils/diffsync_utils.py b/nautobot_device_onboarding/utils/diffsync_utils.py index 4c927efe..66edd2be 100644 --- a/nautobot_device_onboarding/utils/diffsync_utils.py +++ b/nautobot_device_onboarding/utils/diffsync_utils.py @@ -8,7 +8,7 @@ from nautobot.ipam.models import IPAddress, Prefix -def generate_device_querset_from_command_getter_result(command_getter_result): +def generate_device_queryset_from_command_getter_result(command_getter_result): """Generate a Nautobot device queryset based on data returned from CommandGetter.""" devices_to_sync_hostnames = [] devices_to_sync_serial_numbers = [] From 9ec6a01bb7d158e75636a84c0b5b1a845bb487d5 Mon Sep 17 00:00:00 2001 From: David Cates Date: Mon, 26 Feb 2024 14:34:46 -0700 Subject: [PATCH 127/225] update mock data --- .../diffsync/mock_data.py | 38 +++++++++---------- 1 file changed, 19 insertions(+), 19 deletions(-) diff --git a/nautobot_device_onboarding/diffsync/mock_data.py b/nautobot_device_onboarding/diffsync/mock_data.py index ffaa19d3..c8b41560 100644 --- a/nautobot_device_onboarding/diffsync/mock_data.py +++ b/nautobot_device_onboarding/diffsync/mock_data.py @@ -10,12 +10,12 @@ "GigabitEthernet1": { "type": "100base-tx", "ip_addresses": [ - {"host": "10.1.1.8", "mask_length": 16}, + {"ip_address": "10.1.1.8", "prefix_length": 16}, ], "mac_address": "d8b1.905c.7130", "mtu": "1500", "description": "", - "enabled": True, + "link_status": True, "802.1Q_mode": "tagged", "lag": "", "untagged_vlan": {"name": "vlan60", "id": "60"}, @@ -26,12 +26,12 @@ "GigabitEthernet2": { "type": "100base-tx", "ip_addresses": [ - {"host": "10.1.1.9", "mask_length": 24}, + {"ip_address": "10.1.1.9", "prefix_length": 24}, ], "mac_address": "d8b1.905c.7131", "mtu": "1500", "description": "uplink Po1", - "enabled": True, + "link_status": True, "802.1Q_mode": "", "lag": "Po2", "untagged_vlan": "", @@ -42,13 +42,13 @@ "GigabitEthernet3": { "type": "100base-tx", 
"ip_addresses": [ - {"host": "10.1.1.10", "mask_length": 24}, - {"host": "10.1.1.11", "mask_length": 22}, + {"ip_address": "10.1.1.10", "prefix_length": 24}, + {"ip_address": "10.1.1.11", "prefix_length": 22}, ], "mac_address": "d8b1.905c.7132", "mtu": "1500", "description": "", - "enabled": True, + "link_status": True, "802.1Q_mode": "tagged", "lag": "Po1", "untagged_vlan": "", @@ -59,12 +59,12 @@ "GigabitEthernet4": { "type": "100base-tx", "ip_addresses": [ - {"host": "10.1.1.12", "mask_length": 20}, + {"ip_address": "10.1.1.12", "prefix_length": 20}, ], "mac_address": "d8b1.905c.7133", "mtu": "1500", "description": "", - "enabled": True, + "link_status": True, "802.1Q_mode": "", "lag": "", "untagged_vlan": "", @@ -78,7 +78,7 @@ "mac_address": "d8b1.905c.7134", "mtu": "1500", "description": "", - "enabled": True, + "link_status": True, "802.1Q_mode": "", "lag": "", "untagged_vlan": "", @@ -92,7 +92,7 @@ "mac_address": "d8b1.905c.7135", "mtu": "1500", "description": "", - "enabled": True, + "link_status": True, "802.1Q_mode": "", "lag": "", "untagged_vlan": "", @@ -108,12 +108,12 @@ "GigabitEthernet1": { "type": "100base-tx", "ip_addresses": [ - {"host": "10.1.2.8", "mask_length": 24}, + {"ip_address": "10.1.2.8", "prefix_length": 24}, ], "mac_address": "d8b1.905c.5170", "mtu": "1500", "description": "", - "enabled": True, + "link_status": True, "802.1Q_mode": "tagged", "lag": "", "untagged_vlan": {"name": "vlan60", "id": "60"}, @@ -124,12 +124,12 @@ "GigabitEthernet2": { "type": "100base-tx", "ip_addresses": [ - {"host": "10.1.2.9", "mask_length": 24}, + {"ip_address": "10.1.2.9", "prefix_length": 24}, ], "mac_address": "d8b1.905c.5171", "mtu": "1500", "description": "uplink Po1", - "enabled": True, + "link_status": True, "802.1Q_mode": "", "lag": "Po1", "untagged_vlan": "", @@ -140,13 +140,13 @@ "GigabitEthernet3": { "type": "100base-tx", "ip_addresses": [ - {"host": "10.1.2.10", "mask_length": 24}, - {"host": "10.1.2.11", "mask_length": 22}, + {"ip_address": 
"10.1.2.10", "prefix_length": 24}, + {"ip_address": "10.1.2.11", "prefix_length": 22}, ], "mac_address": "d8b1.905c.5172", "mtu": "1500", "description": "", - "enabled": True, + "link_status": True, "802.1Q_mode": "tagged", "lag": "Po1", "untagged_vlan": "", @@ -160,7 +160,7 @@ "mac_address": "d8b1.905c.5173", "mtu": "1500", "description": "", - "enabled": True, + "link_status": True, "802.1Q_mode": "", "lag": "", "untagged_vlan": "", From 22b2de2e6e88709ad7389e151265d33d06d1ed18 Mon Sep 17 00:00:00 2001 From: David Cates Date: Mon, 26 Feb 2024 14:44:06 -0700 Subject: [PATCH 128/225] formatting --- .../adapters/network_importer_adapters.py | 8 +++++--- .../diffsync/adapters/onboarding_adapters.py | 1 + nautobot_device_onboarding/jobs.py | 13 +++++++------ .../nornir_plays/command_getter.py | 15 ++++++++------- nautobot_device_onboarding/utils/formatter.py | 2 +- .../utils/inventory_creator.py | 5 +++-- 6 files changed, 25 insertions(+), 19 deletions(-) diff --git a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py index 32b33e57..db90b2d9 100644 --- a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py @@ -1,16 +1,18 @@ """DiffSync adapters.""" import json + import diffsync from diffsync.enum import DiffSyncModelFlags from django.core.exceptions import ValidationError from nautobot.dcim.models import Interface -from nautobot.ipam.models import IPAddress, VLAN +from nautobot.ipam.models import VLAN, IPAddress +from nautobot_ssot.contrib import NautobotAdapter +from netaddr import EUI, mac_unix_expanded + from nautobot_device_onboarding.diffsync.models import network_importer_models from nautobot_device_onboarding.nornir_plays.command_getter import command_getter_ni from nautobot_device_onboarding.utils import diffsync_utils -from nautobot_ssot.contrib import 
NautobotAdapter -from netaddr import EUI, mac_unix_expanded class FilteredNautobotAdapter(NautobotAdapter): diff --git a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py index b2a684eb..4856f172 100644 --- a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py @@ -9,6 +9,7 @@ from django.core.exceptions import ValidationError from django.db.models import Model from nautobot.dcim.models import Device, DeviceType, Manufacturer, Platform + from nautobot_device_onboarding.diffsync.models import onboarding_models from nautobot_device_onboarding.nornir_plays.command_getter import command_getter_do from nautobot_device_onboarding.utils import diffsync_utils diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py index 98199a05..11be4ebd 100755 --- a/nautobot_device_onboarding/jobs.py +++ b/nautobot_device_onboarding/jobs.py @@ -11,6 +11,8 @@ from nautobot.extras.choices import SecretsGroupAccessTypeChoices, SecretsGroupSecretTypeChoices from nautobot.extras.models import Role, SecretsGroup, SecretsGroupAssociation, Status from nautobot.ipam.models import Namespace +from nautobot_ssot.jobs.base import DataSource + from nautobot_device_onboarding.diffsync.adapters.network_importer_adapters import ( NetworkImporterNautobotAdapter, NetworkImporterNetworkAdapter, @@ -23,7 +25,6 @@ from nautobot_device_onboarding.helpers import onboarding_task_fqdn_to_ip from nautobot_device_onboarding.netdev_keeper import NetdevKeeper from nautobot_device_onboarding.nornir_plays.command_getter import command_getter_do, command_getter_ni -from nautobot_ssot.jobs.base import DataSource PLUGIN_SETTINGS = settings.PLUGINS_CONFIG["nautobot_device_onboarding"] @@ -347,14 +348,14 @@ def __init__(self): super().__init__() self.filtered_devices = None # Queryset of devices based on form inputs - 
#################### FOR TESTING ONLY ######################################### + # FOR TESTING ONLY # # from nautobot_device_onboarding.diffsync import mock_data # from nautobot_device_onboarding.utils import diffsync_utils # self.command_getter_result = mock_data.network_importer_mock_data # self.devices_to_load = diffsync_utils.generate_device_queryset_from_command_getter_result(mock_data.network_importer_mock_data) - ################### REMOVE WHEN NOT TESTING ################################### + # REMOVE WHEN NOT TESTING # - ############ RESTORE THESE LINES WHEN NOT TESTING! ############################ + # RESTORE THESE LINES WHEN NOT TESTING! # self.command_getter_result = None # Dict result from CommandGetter job self.devices_to_load = None # Queryset consisting of devices that responded @@ -413,7 +414,7 @@ class Meta: def load_source_adapter(self): """Load onboarding network adapter.""" # do not load source data if the job form does not filter which devices to sync - if self.filtered_devices: + if self.filtered_devices: self.source_adapter = NetworkImporterNetworkAdapter(job=self, sync=self.sync) self.source_adapter.load() @@ -459,7 +460,7 @@ def run( device_filter["location"] = location if self.device_role: device_filter["role"] = device_role - if device_filter: # prevent all devices from being returned by an empty filter + if device_filter: # prevent all devices from being returned by an empty filter self.filtered_devices = Device.objects.filter(**device_filter) else: self.logger.error("No device filter options were provided, no devices will be synced.") diff --git a/nautobot_device_onboarding/nornir_plays/command_getter.py b/nautobot_device_onboarding/nornir_plays/command_getter.py index 96660e7e..3ce57956 100755 --- a/nautobot_device_onboarding/nornir_plays/command_getter.py +++ b/nautobot_device_onboarding/nornir_plays/command_getter.py @@ -1,12 +1,6 @@ """Nornir job for backing up actual config.""" # pylint: disable=relative-beyond-top-level -from 
nautobot_device_onboarding.constants import NETMIKO_TO_NAPALM_STATIC -from nautobot_device_onboarding.nornir_plays.empty_inventory import EmptyInventory -from nautobot_device_onboarding.nornir_plays.logger import NornirLogger -from nautobot_device_onboarding.nornir_plays.processor import ProcessorDO -from nautobot_device_onboarding.utils.helper import add_platform_parsing_info, get_job_filter -from nautobot_device_onboarding.utils.inventory_creator import _set_inventory from nautobot_plugin_nornir.constants import NORNIR_SETTINGS from nautobot_plugin_nornir.plugins.inventory.nautobot_orm import NautobotORMInventory from nornir import InitNornir @@ -14,6 +8,13 @@ from nornir.core.task import Result, Task from nornir_netmiko.tasks import netmiko_send_command +from nautobot_device_onboarding.constants import NETMIKO_TO_NAPALM_STATIC +from nautobot_device_onboarding.nornir_plays.empty_inventory import EmptyInventory +from nautobot_device_onboarding.nornir_plays.logger import NornirLogger +from nautobot_device_onboarding.nornir_plays.processor import ProcessorDO +from nautobot_device_onboarding.utils.helper import add_platform_parsing_info +from nautobot_device_onboarding.utils.inventory_creator import _set_inventory + InventoryPluginRegister.register("nautobot-inventory", NautobotORMInventory) InventoryPluginRegister.register("empty-inventory", EmptyInventory) TransformFunctionRegister.register("transform_to_add_command_parser_info", add_platform_parsing_info) @@ -53,7 +54,7 @@ def command_getter_do(job_result, log_level, kwargs): ip_addresses = kwargs["ip_addresses"].replace(" ", "").split(",") port = kwargs["port"] - timeout = kwargs["timeout"] + # timeout = kwargs["timeout"] secrets_group = kwargs["secrets_group"] platform = kwargs["platform"] # Initiate Nornir instance with empty inventory diff --git a/nautobot_device_onboarding/utils/formatter.py b/nautobot_device_onboarding/utils/formatter.py index 9baeb4e1..d8162c68 100755 --- 
a/nautobot_device_onboarding/utils/formatter.py +++ b/nautobot_device_onboarding/utils/formatter.py @@ -9,7 +9,7 @@ # from jinja2 import exceptions as jinja_errors from jinja2.sandbox import SandboxedEnvironment -from nautobot.core.utils.data import render_jinja2 +# from nautobot.core.utils.data import render_jinja2 # from nautobot_device_onboarding.exceptions import OnboardException from nautobot_device_onboarding.utils.jinja_filters import fix_interfaces diff --git a/nautobot_device_onboarding/utils/inventory_creator.py b/nautobot_device_onboarding/utils/inventory_creator.py index ab677109..5141e778 100755 --- a/nautobot_device_onboarding/utils/inventory_creator.py +++ b/nautobot_device_onboarding/utils/inventory_creator.py @@ -2,11 +2,12 @@ from django.conf import settings from nautobot.extras.choices import SecretsGroupAccessTypeChoices, SecretsGroupSecretTypeChoices -from nautobot_device_onboarding.exceptions import OnboardException -from nautobot_device_onboarding.utils.helper import _get_platform_parsing_info from netmiko import SSHDetect from nornir.core.inventory import ConnectionOptions, Host +from nautobot_device_onboarding.exceptions import OnboardException +from nautobot_device_onboarding.utils.helper import _get_platform_parsing_info + def _parse_credentials(credentials): """Parse and return dictionary of credentials.""" From 2cbb9dca867d65c9aa73a7cd8664c088f409f763 Mon Sep 17 00:00:00 2001 From: David Cates Date: Mon, 26 Feb 2024 14:45:37 -0700 Subject: [PATCH 129/225] black --- nautobot_device_onboarding/utils/formatter.py | 1 + 1 file changed, 1 insertion(+) diff --git a/nautobot_device_onboarding/utils/formatter.py b/nautobot_device_onboarding/utils/formatter.py index d8162c68..eab8c629 100755 --- a/nautobot_device_onboarding/utils/formatter.py +++ b/nautobot_device_onboarding/utils/formatter.py @@ -9,6 +9,7 @@ # from jinja2 import exceptions as jinja_errors from jinja2.sandbox import SandboxedEnvironment + # from nautobot.core.utils.data import 
render_jinja2 # from nautobot_device_onboarding.exceptions import OnboardException From becbeb5384fcb905854a872eeaf7d85889264cff Mon Sep 17 00:00:00 2001 From: David Cates Date: Mon, 26 Feb 2024 14:50:46 -0700 Subject: [PATCH 130/225] pylint --- .../diffsync/adapters/network_importer_adapters.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py index db90b2d9..76fdb2e0 100644 --- a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py @@ -253,8 +253,7 @@ def _process_mac_address(self, mac_address): """Convert a mac address to match the value stored by Nautobot.""" if mac_address: return str(EUI(mac_address, version=48, dialect=MacUnixExpandedUppercase)) - else: - return "" + return "" def load_devices(self): """Load devices into the DiffSync store.""" @@ -292,7 +291,7 @@ def load_interface(self, hostname, interface_name, interface_data): def load_ip_addresses(self): """Load IP addresses into the DiffSync store.""" - for hostname, device_data in self.job.command_getter_result.items(): + for hostname, device_data in self.job.command_getter_result.items(): # pylint: disable=too-many-nested-blocks for interface in json.loads(device_data["interfaces"]): for interface_name, interface_data in interface.items(): for ip_address in interface_data["ip_addresses"]: @@ -323,7 +322,7 @@ def load_vlans(self): for device in self.job.devices_to_load: location_names[device.name] = device.location.name - for hostname, device_data in self.job.command_getter_result.items(): + for hostname, device_data in self.job.command_getter_result.items(): # pylint: disable=too-many-nested-blocks for interface in json.loads(device_data["interfaces"]): for _, interface_data in interface.items(): # add tagged vlans @@ -357,7 
+356,7 @@ def load_vlans(self): def load_ip_address_to_interfaces(self): """Load ip address interface assignments into the Diffsync store.""" - for hostname, device_data in self.job.command_getter_result.items(): + for hostname, device_data in self.job.command_getter_result.items(): # pylint: disable=too-many-nested-blocks for interface in json.loads(device_data["interfaces"]): for interface_name, interface_data in interface.items(): for ip_address in interface_data["ip_addresses"]: From 0712eea14122c3b7d8a75b6c098433dc97a39193 Mon Sep 17 00:00:00 2001 From: David Cates Date: Mon, 26 Feb 2024 14:52:19 -0700 Subject: [PATCH 131/225] update logging message --- nautobot_device_onboarding/utils/inventory_creator.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nautobot_device_onboarding/utils/inventory_creator.py b/nautobot_device_onboarding/utils/inventory_creator.py index 5141e778..9b6e9ea8 100755 --- a/nautobot_device_onboarding/utils/inventory_creator.py +++ b/nautobot_device_onboarding/utils/inventory_creator.py @@ -57,7 +57,7 @@ def guess_netmiko_device_type(hostname, username, password, port): except Exception as err: print(err) - print(f"************************Guessed device type: {guessed_device_type}") + print(f"{hostname} - guessed platform: {guessed_device_type}") return guessed_device_type From d6f64a19d39b68964e99f1ce4a386c44cae51843 Mon Sep 17 00:00:00 2001 From: David Cates Date: Mon, 26 Feb 2024 14:57:35 -0700 Subject: [PATCH 132/225] flake8 --- .../diffsync/adapters/network_importer_adapters.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py index 76fdb2e0..eccaf579 100644 --- a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py @@ -291,7 +291,7 @@ def 
load_interface(self, hostname, interface_name, interface_data): def load_ip_addresses(self): """Load IP addresses into the DiffSync store.""" - for hostname, device_data in self.job.command_getter_result.items(): # pylint: disable=too-many-nested-blocks + for hostname, device_data in self.job.command_getter_result.items(): # pylint: disable=too-many-nested-blocks for interface in json.loads(device_data["interfaces"]): for interface_name, interface_data in interface.items(): for ip_address in interface_data["ip_addresses"]: @@ -322,7 +322,7 @@ def load_vlans(self): for device in self.job.devices_to_load: location_names[device.name] = device.location.name - for hostname, device_data in self.job.command_getter_result.items(): # pylint: disable=too-many-nested-blocks + for hostname, device_data in self.job.command_getter_result.items(): # pylint: disable=too-many-nested-blocks for interface in json.loads(device_data["interfaces"]): for _, interface_data in interface.items(): # add tagged vlans @@ -356,7 +356,7 @@ def load_vlans(self): def load_ip_address_to_interfaces(self): """Load ip address interface assignments into the Diffsync store.""" - for hostname, device_data in self.job.command_getter_result.items(): # pylint: disable=too-many-nested-blocks + for hostname, device_data in self.job.command_getter_result.items(): # pylint: disable=too-many-nested-blocks for interface in json.loads(device_data["interfaces"]): for interface_name, interface_data in interface.items(): for ip_address in interface_data["ip_addresses"]: From 41f02774d30e54aa620e8fa13b180c35289b4568 Mon Sep 17 00:00:00 2001 From: David Cates Date: Mon, 26 Feb 2024 16:05:30 -0700 Subject: [PATCH 133/225] update CSV support --- .../diffsync/models/onboarding_models.py | 36 ++++++++----------- .../utils/diffsync_utils.py | 4 +-- 2 files changed, 16 insertions(+), 24 deletions(-) diff --git a/nautobot_device_onboarding/diffsync/models/onboarding_models.py 
b/nautobot_device_onboarding/diffsync/models/onboarding_models.py index 37c3796c..0484a18f 100644 --- a/nautobot_device_onboarding/diffsync/models/onboarding_models.py +++ b/nautobot_device_onboarding/diffsync/models/onboarding_models.py @@ -135,27 +135,19 @@ def _get_or_create_interface(cls, diffsync, device, attrs): @classmethod def _update_device_with_attrs(cls, device, platform, ids, attrs, diffsync): """Update a Nautobot device instance.""" - device.location = ( - diffsync_utils.retrieve_submitted_value( - job=diffsync.job, ip_address=attrs["primary_ip4__host"], query_string="location" - ), + device.location =diffsync_utils.retrieve_submitted_value( + job=diffsync.job, ip_address=attrs["primary_ip4__host"], query_string="location", ) - device.status = ( - diffsync_utils.retrieve_submitted_value( - job=diffsync.job, ip_address=attrs["primary_ip4__host"], query_string="device_status" - ), + device.status = diffsync_utils.retrieve_submitted_value( + job=diffsync.job, ip_address=attrs["primary_ip4__host"], query_string="device_status", ) - device.role = ( - diffsync_utils.retrieve_submitted_value( - job=diffsync.job, ip_address=attrs["primary_ip4__host"], query_string="device_role" - ), + device.role = diffsync_utils.retrieve_submitted_value( + job=diffsync.job, ip_address=attrs["primary_ip4__host"], query_string="device_role", ) device.device_type = DeviceType.objects.get(model=attrs["device_type__model"]) device.platform = platform - device.secrets_group = ( - diffsync_utils.retrieve_submitted_value( - job=diffsync.job, ip_address=attrs["primary_ip4__host"], query_string="secrets_group" - ), + device.secrets_group = diffsync_utils.retrieve_submitted_value( + job=diffsync.job, ip_address=attrs["primary_ip4__host"], query_string="secrets_group", ) device.serial = ids["serial"] @@ -231,13 +223,13 @@ def update(self, attrs): ip_address = diffsync_utils.get_or_create_ip_address( host=attrs["primary_ip4__host"], mask_length=attrs["mask_length"], - 
namespace=self.diffsync_utils.retrieve_submitted_value( + namespace=diffsync_utils.retrieve_submitted_value( job=self.job, ip_address=attrs["primary_ip4__host"], query_string="namespace" ), - default_ip_status=self.diffsync_utils.retrieve_submitted_value( + default_ip_status=diffsync_utils.retrieve_submitted_value( job=self.job, ip_address=attrs["primary_ip4__host"], query_string="ip_address_status" ), - default_prefix_status=self.diffsync_utils.retrieve_submitted_value( + default_prefix_status=diffsync_utils.retrieve_submitted_value( job=self.job, ip_address=attrs["primary_ip4__host"], query_string="ip_address_status" ), job=self.diffsync.job, @@ -281,13 +273,13 @@ def update(self, attrs): ip_address = diffsync_utils.get_or_create_ip_address( host=attrs["primary_ip4__host"], mask_length=attrs["mask_length"], - namespace=self.diffsync_utils.retrieve_submitted_value( + namespace=diffsync_utils.retrieve_submitted_value( job=self.job, ip_address=attrs["primary_ip4__host"], query_string="namespace" ), - default_ip_status=self.diffsync_utils.retrieve_submitted_value( + default_ip_status=diffsync_utils.retrieve_submitted_value( job=self.job, ip_address=attrs["primary_ip4__host"], query_string="ip_address_status" ), - default_prefix_status=self.diffsync_utils.retrieve_submitted_value( + default_prefix_status=diffsync_utils.retrieve_submitted_value( job=self.job, ip_address=attrs["primary_ip4__host"], query_string="ip_address_status" ), job=self.diffsync.job, diff --git a/nautobot_device_onboarding/utils/diffsync_utils.py b/nautobot_device_onboarding/utils/diffsync_utils.py index 04b1eb21..947a9505 100644 --- a/nautobot_device_onboarding/utils/diffsync_utils.py +++ b/nautobot_device_onboarding/utils/diffsync_utils.py @@ -98,9 +98,9 @@ def retrieve_submitted_value(job, ip_address, query_string): that was parsed when the file was loaded. If a CSV file has not been submitted, return the value input into the job form. 
""" - if job.debug: - job.logger.debug(f"Retrieving {query_string} for {ip_address}") if job.processed_csv_data: + if job.debug: + job.logger.debug(f"Retrieved csv value: {query_string} for {ip_address}") return job.processed_csv_data[ip_address][query_string] else: return getattr(job, query_string) From 8fa95bce1957499299016388214305a58bf5a23a Mon Sep 17 00:00:00 2001 From: David Cates Date: Mon, 26 Feb 2024 16:24:12 -0700 Subject: [PATCH 134/225] update lock file --- poetry.lock | 236 +++++++++++++++++++++++++++++----------------------- 1 file changed, 133 insertions(+), 103 deletions(-) diff --git a/poetry.lock b/poetry.lock index d8276873..3eb6e194 100755 --- a/poetry.lock +++ b/poetry.lock @@ -112,6 +112,21 @@ six = ">=1.12.0" astroid = ["astroid (>=1,<2)", "astroid (>=2,<4)"] test = ["astroid (>=1,<2)", "astroid (>=2,<4)", "pytest"] +[[package]] +name = "astunparse" +version = "1.6.3" +description = "An AST unparser for Python" +optional = false +python-versions = "*" +files = [ + {file = "astunparse-1.6.3-py2.py3-none-any.whl", hash = "sha256:c2652417f2c8b5bb325c885ae329bdf3f86424075c4fd1a128674bc6fba4b8e8"}, + {file = "astunparse-1.6.3.tar.gz", hash = "sha256:5ad93a8456f0d084c3456d059fd9a92cce667963232cbf763eac3bc5b7940872"}, +] + +[package.dependencies] +six = ">=1.6.1,<2.0" +wheel = ">=0.23.0,<1.0" + [[package]] name = "async-timeout" version = "4.0.3" @@ -642,63 +657,63 @@ files = [ [[package]] name = "coverage" -version = "7.4.2" +version = "7.4.3" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.8" files = [ - {file = "coverage-7.4.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bf54c3e089179d9d23900e3efc86d46e4431188d9a657f345410eecdd0151f50"}, - {file = "coverage-7.4.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fe6e43c8b510719b48af7db9631b5fbac910ade4bd90e6378c85ac5ac706382c"}, - {file = "coverage-7.4.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:8b98c89db1b150d851a7840142d60d01d07677a18f0f46836e691c38134ed18b"}, - {file = "coverage-7.4.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c5f9683be6a5b19cd776ee4e2f2ffb411424819c69afab6b2db3a0a364ec6642"}, - {file = "coverage-7.4.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78cdcbf7b9cb83fe047ee09298e25b1cd1636824067166dc97ad0543b079d22f"}, - {file = "coverage-7.4.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:2599972b21911111114100d362aea9e70a88b258400672626efa2b9e2179609c"}, - {file = "coverage-7.4.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ef00d31b7569ed3cb2036f26565f1984b9fc08541731ce01012b02a4c238bf03"}, - {file = "coverage-7.4.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:20a875bfd8c282985c4720c32aa05056f77a68e6d8bbc5fe8632c5860ee0b49b"}, - {file = "coverage-7.4.2-cp310-cp310-win32.whl", hash = "sha256:b3f2b1eb229f23c82898eedfc3296137cf1f16bb145ceab3edfd17cbde273fb7"}, - {file = "coverage-7.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:7df95fdd1432a5d2675ce630fef5f239939e2b3610fe2f2b5bf21fa505256fa3"}, - {file = "coverage-7.4.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a8ddbd158e069dded57738ea69b9744525181e99974c899b39f75b2b29a624e2"}, - {file = "coverage-7.4.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:81a5fb41b0d24447a47543b749adc34d45a2cf77b48ca74e5bf3de60a7bd9edc"}, - {file = "coverage-7.4.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2412e98e70f16243be41d20836abd5f3f32edef07cbf8f407f1b6e1ceae783ac"}, - {file = "coverage-7.4.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ddb79414c15c6f03f56cc68fa06994f047cf20207c31b5dad3f6bab54a0f66ef"}, - {file = "coverage-7.4.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:cf89ab85027427d351f1de918aff4b43f4eb5f33aff6835ed30322a86ac29c9e"}, - {file = "coverage-7.4.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a178b7b1ac0f1530bb28d2e51f88c0bab3e5949835851a60dda80bff6052510c"}, - {file = "coverage-7.4.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:06fe398145a2e91edaf1ab4eee66149c6776c6b25b136f4a86fcbbb09512fd10"}, - {file = "coverage-7.4.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:18cac867950943fe93d6cd56a67eb7dcd2d4a781a40f4c1e25d6f1ed98721a55"}, - {file = "coverage-7.4.2-cp311-cp311-win32.whl", hash = "sha256:f72cdd2586f9a769570d4b5714a3837b3a59a53b096bb954f1811f6a0afad305"}, - {file = "coverage-7.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:d779a48fac416387dd5673fc5b2d6bd903ed903faaa3247dc1865c65eaa5a93e"}, - {file = "coverage-7.4.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:adbdfcda2469d188d79771d5696dc54fab98a16d2ef7e0875013b5f56a251047"}, - {file = "coverage-7.4.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ac4bab32f396b03ebecfcf2971668da9275b3bb5f81b3b6ba96622f4ef3f6e17"}, - {file = "coverage-7.4.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:006d220ba2e1a45f1de083d5022d4955abb0aedd78904cd5a779b955b019ec73"}, - {file = "coverage-7.4.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3733545eb294e5ad274abe131d1e7e7de4ba17a144505c12feca48803fea5f64"}, - {file = "coverage-7.4.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42a9e754aa250fe61f0f99986399cec086d7e7a01dd82fd863a20af34cbce962"}, - {file = "coverage-7.4.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:2ed37e16cf35c8d6e0b430254574b8edd242a367a1b1531bd1adc99c6a5e00fe"}, - {file = "coverage-7.4.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:b953275d4edfab6cc0ed7139fa773dfb89e81fee1569a932f6020ce7c6da0e8f"}, - {file = 
"coverage-7.4.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:32b4ab7e6c924f945cbae5392832e93e4ceb81483fd6dc4aa8fb1a97b9d3e0e1"}, - {file = "coverage-7.4.2-cp312-cp312-win32.whl", hash = "sha256:f5df76c58977bc35a49515b2fbba84a1d952ff0ec784a4070334dfbec28a2def"}, - {file = "coverage-7.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:34423abbaad70fea9d0164add189eabaea679068ebdf693baa5c02d03e7db244"}, - {file = "coverage-7.4.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5b11f9c6587668e495cc7365f85c93bed34c3a81f9f08b0920b87a89acc13469"}, - {file = "coverage-7.4.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:51593a1f05c39332f623d64d910445fdec3d2ac2d96b37ce7f331882d5678ddf"}, - {file = "coverage-7.4.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69f1665165ba2fe7614e2f0c1aed71e14d83510bf67e2ee13df467d1c08bf1e8"}, - {file = "coverage-7.4.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3c8bbb95a699c80a167478478efe5e09ad31680931ec280bf2087905e3b95ec"}, - {file = "coverage-7.4.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:175f56572f25e1e1201d2b3e07b71ca4d201bf0b9cb8fad3f1dfae6a4188de86"}, - {file = "coverage-7.4.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8562ca91e8c40864942615b1d0b12289d3e745e6b2da901d133f52f2d510a1e3"}, - {file = "coverage-7.4.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d9a1ef0f173e1a19738f154fb3644f90d0ada56fe6c9b422f992b04266c55d5a"}, - {file = "coverage-7.4.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f40ac873045db4fd98a6f40387d242bde2708a3f8167bd967ccd43ad46394ba2"}, - {file = "coverage-7.4.2-cp38-cp38-win32.whl", hash = "sha256:d1b750a8409bec61caa7824bfd64a8074b6d2d420433f64c161a8335796c7c6b"}, - {file = "coverage-7.4.2-cp38-cp38-win_amd64.whl", hash = "sha256:b4ae777bebaed89e3a7e80c4a03fac434a98a8abb5251b2a957d38fe3fd30088"}, - {file = 
"coverage-7.4.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3ff7f92ae5a456101ca8f48387fd3c56eb96353588e686286f50633a611afc95"}, - {file = "coverage-7.4.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:861d75402269ffda0b33af94694b8e0703563116b04c681b1832903fac8fd647"}, - {file = "coverage-7.4.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3507427d83fa961cbd73f11140f4a5ce84208d31756f7238d6257b2d3d868405"}, - {file = "coverage-7.4.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bf711d517e21fb5bc429f5c4308fbc430a8585ff2a43e88540264ae87871e36a"}, - {file = "coverage-7.4.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c00e54f0bd258ab25e7f731ca1d5144b0bf7bec0051abccd2bdcff65fa3262c9"}, - {file = "coverage-7.4.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f8e845d894e39fb53834da826078f6dc1a933b32b1478cf437007367efaf6f6a"}, - {file = "coverage-7.4.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:840456cb1067dc350af9080298c7c2cfdddcedc1cb1e0b30dceecdaf7be1a2d3"}, - {file = "coverage-7.4.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c11ca2df2206a4e3e4c4567f52594637392ed05d7c7fb73b4ea1c658ba560265"}, - {file = "coverage-7.4.2-cp39-cp39-win32.whl", hash = "sha256:3ff5bdb08d8938d336ce4088ca1a1e4b6c8cd3bef8bb3a4c0eb2f37406e49643"}, - {file = "coverage-7.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:ac9e95cefcf044c98d4e2c829cd0669918585755dd9a92e28a1a7012322d0a95"}, - {file = "coverage-7.4.2-pp38.pp39.pp310-none-any.whl", hash = "sha256:f593a4a90118d99014517c2679e04a4ef5aee2d81aa05c26c734d271065efcb6"}, - {file = "coverage-7.4.2.tar.gz", hash = "sha256:1a5ee18e3a8d766075ce9314ed1cb695414bae67df6a4b0805f5137d93d6f1cb"}, + {file = "coverage-7.4.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8580b827d4746d47294c0e0b92854c85a92c2227927433998f0d3320ae8a71b6"}, + {file = 
"coverage-7.4.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:718187eeb9849fc6cc23e0d9b092bc2348821c5e1a901c9f8975df0bc785bfd4"}, + {file = "coverage-7.4.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:767b35c3a246bcb55b8044fd3a43b8cd553dd1f9f2c1eeb87a302b1f8daa0524"}, + {file = "coverage-7.4.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae7f19afe0cce50039e2c782bff379c7e347cba335429678450b8fe81c4ef96d"}, + {file = "coverage-7.4.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba3a8aaed13770e970b3df46980cb068d1c24af1a1968b7818b69af8c4347efb"}, + {file = "coverage-7.4.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ee866acc0861caebb4f2ab79f0b94dbfbdbfadc19f82e6e9c93930f74e11d7a0"}, + {file = "coverage-7.4.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:506edb1dd49e13a2d4cac6a5173317b82a23c9d6e8df63efb4f0380de0fbccbc"}, + {file = "coverage-7.4.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd6545d97c98a192c5ac995d21c894b581f1fd14cf389be90724d21808b657e2"}, + {file = "coverage-7.4.3-cp310-cp310-win32.whl", hash = "sha256:f6a09b360d67e589236a44f0c39218a8efba2593b6abdccc300a8862cffc2f94"}, + {file = "coverage-7.4.3-cp310-cp310-win_amd64.whl", hash = "sha256:18d90523ce7553dd0b7e23cbb28865db23cddfd683a38fb224115f7826de78d0"}, + {file = "coverage-7.4.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cbbe5e739d45a52f3200a771c6d2c7acf89eb2524890a4a3aa1a7fa0695d2a47"}, + {file = "coverage-7.4.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:489763b2d037b164846ebac0cbd368b8a4ca56385c4090807ff9fad817de4113"}, + {file = "coverage-7.4.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:451f433ad901b3bb00184d83fd83d135fb682d780b38af7944c9faeecb1e0bfe"}, + {file = 
"coverage-7.4.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fcc66e222cf4c719fe7722a403888b1f5e1682d1679bd780e2b26c18bb648cdc"}, + {file = "coverage-7.4.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3ec74cfef2d985e145baae90d9b1b32f85e1741b04cd967aaf9cfa84c1334f3"}, + {file = "coverage-7.4.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:abbbd8093c5229c72d4c2926afaee0e6e3140de69d5dcd918b2921f2f0c8baba"}, + {file = "coverage-7.4.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:35eb581efdacf7b7422af677b92170da4ef34500467381e805944a3201df2079"}, + {file = "coverage-7.4.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8249b1c7334be8f8c3abcaaa996e1e4927b0e5a23b65f5bf6cfe3180d8ca7840"}, + {file = "coverage-7.4.3-cp311-cp311-win32.whl", hash = "sha256:cf30900aa1ba595312ae41978b95e256e419d8a823af79ce670835409fc02ad3"}, + {file = "coverage-7.4.3-cp311-cp311-win_amd64.whl", hash = "sha256:18c7320695c949de11a351742ee001849912fd57e62a706d83dfc1581897fa2e"}, + {file = "coverage-7.4.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b51bfc348925e92a9bd9b2e48dad13431b57011fd1038f08316e6bf1df107d10"}, + {file = "coverage-7.4.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d6cdecaedea1ea9e033d8adf6a0ab11107b49571bbb9737175444cea6eb72328"}, + {file = "coverage-7.4.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b2eccb883368f9e972e216c7b4c7c06cabda925b5f06dde0650281cb7666a30"}, + {file = "coverage-7.4.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6c00cdc8fa4e50e1cc1f941a7f2e3e0f26cb2a1233c9696f26963ff58445bac7"}, + {file = "coverage-7.4.3-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9a4a8dd3dcf4cbd3165737358e4d7dfbd9d59902ad11e3b15eebb6393b0446e"}, + {file = 
"coverage-7.4.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:062b0a75d9261e2f9c6d071753f7eef0fc9caf3a2c82d36d76667ba7b6470003"}, + {file = "coverage-7.4.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:ebe7c9e67a2d15fa97b77ea6571ce5e1e1f6b0db71d1d5e96f8d2bf134303c1d"}, + {file = "coverage-7.4.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c0a120238dd71c68484f02562f6d446d736adcc6ca0993712289b102705a9a3a"}, + {file = "coverage-7.4.3-cp312-cp312-win32.whl", hash = "sha256:37389611ba54fd6d278fde86eb2c013c8e50232e38f5c68235d09d0a3f8aa352"}, + {file = "coverage-7.4.3-cp312-cp312-win_amd64.whl", hash = "sha256:d25b937a5d9ffa857d41be042b4238dd61db888533b53bc76dc082cb5a15e914"}, + {file = "coverage-7.4.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:28ca2098939eabab044ad68850aac8f8db6bf0b29bc7f2887d05889b17346454"}, + {file = "coverage-7.4.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:280459f0a03cecbe8800786cdc23067a8fc64c0bd51dc614008d9c36e1659d7e"}, + {file = "coverage-7.4.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c0cdedd3500e0511eac1517bf560149764b7d8e65cb800d8bf1c63ebf39edd2"}, + {file = "coverage-7.4.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9a9babb9466fe1da12417a4aed923e90124a534736de6201794a3aea9d98484e"}, + {file = "coverage-7.4.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dec9de46a33cf2dd87a5254af095a409ea3bf952d85ad339751e7de6d962cde6"}, + {file = "coverage-7.4.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:16bae383a9cc5abab9bb05c10a3e5a52e0a788325dc9ba8499e821885928968c"}, + {file = "coverage-7.4.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:2c854ce44e1ee31bda4e318af1dbcfc929026d12c5ed030095ad98197eeeaed0"}, + {file = "coverage-7.4.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:ce8c50520f57ec57aa21a63ea4f325c7b657386b3f02ccaedeccf9ebe27686e1"}, + {file = "coverage-7.4.3-cp38-cp38-win32.whl", hash = "sha256:708a3369dcf055c00ddeeaa2b20f0dd1ce664eeabde6623e516c5228b753654f"}, + {file = "coverage-7.4.3-cp38-cp38-win_amd64.whl", hash = "sha256:1bf25fbca0c8d121a3e92a2a0555c7e5bc981aee5c3fdaf4bb7809f410f696b9"}, + {file = "coverage-7.4.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3b253094dbe1b431d3a4ac2f053b6d7ede2664ac559705a704f621742e034f1f"}, + {file = "coverage-7.4.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:77fbfc5720cceac9c200054b9fab50cb2a7d79660609200ab83f5db96162d20c"}, + {file = "coverage-7.4.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6679060424faa9c11808598504c3ab472de4531c571ab2befa32f4971835788e"}, + {file = "coverage-7.4.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4af154d617c875b52651dd8dd17a31270c495082f3d55f6128e7629658d63765"}, + {file = "coverage-7.4.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8640f1fde5e1b8e3439fe482cdc2b0bb6c329f4bb161927c28d2e8879c6029ee"}, + {file = "coverage-7.4.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:69b9f6f66c0af29642e73a520b6fed25ff9fd69a25975ebe6acb297234eda501"}, + {file = "coverage-7.4.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:0842571634f39016a6c03e9d4aba502be652a6e4455fadb73cd3a3a49173e38f"}, + {file = "coverage-7.4.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a78ed23b08e8ab524551f52953a8a05d61c3a760781762aac49f8de6eede8c45"}, + {file = "coverage-7.4.3-cp39-cp39-win32.whl", hash = "sha256:c0524de3ff096e15fcbfe8f056fdb4ea0bf497d584454f344d59fce069d3e6e9"}, + {file = "coverage-7.4.3-cp39-cp39-win_amd64.whl", hash = "sha256:0209a6369ccce576b43bb227dc8322d8ef9e323d089c6f3f26a597b09cb4d2aa"}, + {file = "coverage-7.4.3-pp38.pp39.pp310-none-any.whl", hash = 
"sha256:7cbde573904625509a3f37b6fecea974e363460b556a627c60dc2f47e2fffa51"}, + {file = "coverage-7.4.3.tar.gz", hash = "sha256:276f6077a5c61447a48d133ed13e759c09e62aff0dc84274a68dc18660104d52"}, ] [package.extras] @@ -720,43 +735,43 @@ dev = ["polib"] [[package]] name = "cryptography" -version = "42.0.4" +version = "42.0.5" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." optional = false python-versions = ">=3.7" files = [ - {file = "cryptography-42.0.4-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:ffc73996c4fca3d2b6c1c8c12bfd3ad00def8621da24f547626bf06441400449"}, - {file = "cryptography-42.0.4-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:db4b65b02f59035037fde0998974d84244a64c3265bdef32a827ab9b63d61b18"}, - {file = "cryptography-42.0.4-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad9c385ba8ee025bb0d856714f71d7840020fe176ae0229de618f14dae7a6e2"}, - {file = "cryptography-42.0.4-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:69b22ab6506a3fe483d67d1ed878e1602bdd5912a134e6202c1ec672233241c1"}, - {file = "cryptography-42.0.4-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:e09469a2cec88fb7b078e16d4adec594414397e8879a4341c6ace96013463d5b"}, - {file = "cryptography-42.0.4-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3e970a2119507d0b104f0a8e281521ad28fc26f2820687b3436b8c9a5fcf20d1"}, - {file = "cryptography-42.0.4-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:e53dc41cda40b248ebc40b83b31516487f7db95ab8ceac1f042626bc43a2f992"}, - {file = "cryptography-42.0.4-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:c3a5cbc620e1e17009f30dd34cb0d85c987afd21c41a74352d1719be33380885"}, - {file = "cryptography-42.0.4-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:6bfadd884e7280df24d26f2186e4e07556a05d37393b0f220a840b083dc6a824"}, - {file = "cryptography-42.0.4-cp37-abi3-musllinux_1_2_x86_64.whl", hash = 
"sha256:01911714117642a3f1792c7f376db572aadadbafcd8d75bb527166009c9f1d1b"}, - {file = "cryptography-42.0.4-cp37-abi3-win32.whl", hash = "sha256:fb0cef872d8193e487fc6bdb08559c3aa41b659a7d9be48b2e10747f47863925"}, - {file = "cryptography-42.0.4-cp37-abi3-win_amd64.whl", hash = "sha256:c1f25b252d2c87088abc8bbc4f1ecbf7c919e05508a7e8628e6875c40bc70923"}, - {file = "cryptography-42.0.4-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:15a1fb843c48b4a604663fa30af60818cd28f895572386e5f9b8a665874c26e7"}, - {file = "cryptography-42.0.4-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1327f280c824ff7885bdeef8578f74690e9079267c1c8bd7dc5cc5aa065ae52"}, - {file = "cryptography-42.0.4-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ffb03d419edcab93b4b19c22ee80c007fb2d708429cecebf1dd3258956a563a"}, - {file = "cryptography-42.0.4-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:1df6fcbf60560d2113b5ed90f072dc0b108d64750d4cbd46a21ec882c7aefce9"}, - {file = "cryptography-42.0.4-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:44a64043f743485925d3bcac548d05df0f9bb445c5fcca6681889c7c3ab12764"}, - {file = "cryptography-42.0.4-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:3c6048f217533d89f2f8f4f0fe3044bf0b2090453b7b73d0b77db47b80af8dff"}, - {file = "cryptography-42.0.4-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:6d0fbe73728c44ca3a241eff9aefe6496ab2656d6e7a4ea2459865f2e8613257"}, - {file = "cryptography-42.0.4-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:887623fe0d70f48ab3f5e4dbf234986b1329a64c066d719432d0698522749929"}, - {file = "cryptography-42.0.4-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:ce8613beaffc7c14f091497346ef117c1798c202b01153a8cc7b8e2ebaaf41c0"}, - {file = "cryptography-42.0.4-cp39-abi3-win32.whl", hash = "sha256:810bcf151caefc03e51a3d61e53335cd5c7316c0a105cc695f0959f2c638b129"}, - {file = "cryptography-42.0.4-cp39-abi3-win_amd64.whl", hash = 
"sha256:a0298bdc6e98ca21382afe914c642620370ce0470a01e1bef6dd9b5354c36854"}, - {file = "cryptography-42.0.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5f8907fcf57392cd917892ae83708761c6ff3c37a8e835d7246ff0ad251d9298"}, - {file = "cryptography-42.0.4-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:12d341bd42cdb7d4937b0cabbdf2a94f949413ac4504904d0cdbdce4a22cbf88"}, - {file = "cryptography-42.0.4-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1cdcdbd117681c88d717437ada72bdd5be9de117f96e3f4d50dab3f59fd9ab20"}, - {file = "cryptography-42.0.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:0e89f7b84f421c56e7ff69f11c441ebda73b8a8e6488d322ef71746224c20fce"}, - {file = "cryptography-42.0.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f1e85a178384bf19e36779d91ff35c7617c885da487d689b05c1366f9933ad74"}, - {file = "cryptography-42.0.4-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:d2a27aca5597c8a71abbe10209184e1a8e91c1fd470b5070a2ea60cafec35bcd"}, - {file = "cryptography-42.0.4-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:4e36685cb634af55e0677d435d425043967ac2f3790ec652b2b88ad03b85c27b"}, - {file = "cryptography-42.0.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:f47be41843200f7faec0683ad751e5ef11b9a56a220d57f300376cd8aba81660"}, - {file = "cryptography-42.0.4.tar.gz", hash = "sha256:831a4b37accef30cccd34fcb916a5d7b5be3cbbe27268a02832c3e450aea39cb"}, + {file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:a30596bae9403a342c978fb47d9b0ee277699fa53bbafad14706af51fe543d16"}, + {file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:b7ffe927ee6531c78f81aa17e684e2ff617daeba7f189f911065b2ea2d526dec"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2424ff4c4ac7f6b8177b53c17ed5d8fa74ae5955656867f5a8affaca36a27abb"}, + {file = 
"cryptography-42.0.5-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:329906dcc7b20ff3cad13c069a78124ed8247adcac44b10bea1130e36caae0b4"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:b03c2ae5d2f0fc05f9a2c0c997e1bc18c8229f392234e8a0194f202169ccd278"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f8837fe1d6ac4a8052a9a8ddab256bc006242696f03368a4009be7ee3075cdb7"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:0270572b8bd2c833c3981724b8ee9747b3ec96f699a9665470018594301439ee"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:b8cac287fafc4ad485b8a9b67d0ee80c66bf3574f655d3b97ef2e1082360faf1"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:16a48c23a62a2f4a285699dba2e4ff2d1cff3115b9df052cdd976a18856d8e3d"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:2bce03af1ce5a5567ab89bd90d11e7bbdff56b8af3acbbec1faded8f44cb06da"}, + {file = "cryptography-42.0.5-cp37-abi3-win32.whl", hash = "sha256:b6cd2203306b63e41acdf39aa93b86fb566049aeb6dc489b70e34bcd07adca74"}, + {file = "cryptography-42.0.5-cp37-abi3-win_amd64.whl", hash = "sha256:98d8dc6d012b82287f2c3d26ce1d2dd130ec200c8679b6213b3c73c08b2b7940"}, + {file = "cryptography-42.0.5-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:5e6275c09d2badf57aea3afa80d975444f4be8d3bc58f7f80d2a484c6f9485c8"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4985a790f921508f36f81831817cbc03b102d643b5fcb81cd33df3fa291a1a1"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7cde5f38e614f55e28d831754e8a3bacf9ace5d1566235e39d91b35502d6936e"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_aarch64.whl", hash = 
"sha256:7367d7b2eca6513681127ebad53b2582911d1736dc2ffc19f2c3ae49997496bc"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:cd2030f6650c089aeb304cf093f3244d34745ce0cfcc39f20c6fbfe030102e2a"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a2913c5375154b6ef2e91c10b5720ea6e21007412f6437504ffea2109b5a33d7"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:c41fb5e6a5fe9ebcd58ca3abfeb51dffb5d83d6775405305bfa8715b76521922"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:3eaafe47ec0d0ffcc9349e1708be2aaea4c6dd4978d76bf6eb0cb2c13636c6fc"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:1b95b98b0d2af784078fa69f637135e3c317091b615cd0905f8b8a087e86fa30"}, + {file = "cryptography-42.0.5-cp39-abi3-win32.whl", hash = "sha256:1f71c10d1e88467126f0efd484bd44bca5e14c664ec2ede64c32f20875c0d413"}, + {file = "cryptography-42.0.5-cp39-abi3-win_amd64.whl", hash = "sha256:a011a644f6d7d03736214d38832e030d8268bcff4a41f728e6030325fea3e400"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9481ffe3cf013b71b2428b905c4f7a9a4f76ec03065b05ff499bb5682a8d9ad8"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:ba334e6e4b1d92442b75ddacc615c5476d4ad55cc29b15d590cc6b86efa487e2"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:ba3e4a42397c25b7ff88cdec6e2a16c2be18720f317506ee25210f6d31925f9c"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:111a0d8553afcf8eb02a4fea6ca4f59d48ddb34497aa8706a6cf536f1a5ec576"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:cd65d75953847815962c84a4654a84850b2bb4aed3f26fadcc1c13892e1e29f6"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = 
"sha256:e807b3188f9eb0eaa7bbb579b462c5ace579f1cedb28107ce8b48a9f7ad3679e"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f12764b8fffc7a123f641d7d049d382b73f96a34117e0b637b80643169cec8ac"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:37dd623507659e08be98eec89323469e8c7b4c1407c85112634ae3dbdb926fdd"}, + {file = "cryptography-42.0.5.tar.gz", hash = "sha256:6fe07eec95dfd477eb9530aef5bead34fec819b3aaf6c5bd6d20565da607bfe1"}, ] [package.dependencies] @@ -1472,16 +1487,17 @@ six = ">=1.12" [[package]] name = "griffe" -version = "0.40.1" +version = "0.41.0" description = "Signatures for entire Python programs. Extract the structure, the frame, the skeleton of your project, to generate API documentation or find breaking changes in your API." optional = false python-versions = ">=3.8" files = [ - {file = "griffe-0.40.1-py3-none-any.whl", hash = "sha256:5b8c023f366fe273e762131fe4bfd141ea56c09b3cb825aa92d06a82681cfd93"}, - {file = "griffe-0.40.1.tar.gz", hash = "sha256:66c48a62e2ce5784b6940e603300fcfb807b6f099b94e7f753f1841661fd5c7c"}, + {file = "griffe-0.41.0-py3-none-any.whl", hash = "sha256:8aa7fc6eb00cb80af9c0198178c6b7110cb59fa2c5187bb13ea25eebbe4dd928"}, + {file = "griffe-0.41.0.tar.gz", hash = "sha256:850128c3198c18713eaf0a6cc8572e590a16b1965f72a4e871e66cf84740903f"}, ] [package.dependencies] +astunparse = {version = ">=1.6", markers = "python_version < \"3.9\""} colorama = ">=0.4" [[package]] @@ -3934,19 +3950,19 @@ paramiko = "*" [[package]] name = "setuptools" -version = "69.1.0" +version = "69.1.1" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.1.0-py3-none-any.whl", hash = "sha256:c054629b81b946d63a9c6e732bc8b2513a7c3ea645f11d0139a2191d735c60c6"}, - {file = "setuptools-69.1.0.tar.gz", hash = "sha256:850894c4195f09c4ed30dba56213bf7c3f21d86ed6bdaafb5df5972593bfc401"}, + 
{file = "setuptools-69.1.1-py3-none-any.whl", hash = "sha256:02fa291a0471b3a18b2b2481ed902af520c69e8ae0919c13da936542754b4c56"}, + {file = "setuptools-69.1.1.tar.gz", hash = "sha256:5c0806c7d9af348e6dd3777b4f4dbb42c7ad85b190104837488eab9a7c945cf8"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "singledispatch" @@ 
-3987,13 +4003,13 @@ files = [ [[package]] name = "sniffio" -version = "1.3.0" +version = "1.3.1" description = "Sniff out which async library your code is running under" optional = false python-versions = ">=3.7" files = [ - {file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"}, - {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"}, + {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, + {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, ] [[package]] @@ -4074,13 +4090,13 @@ tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"] [[package]] name = "stevedore" -version = "5.1.0" +version = "5.2.0" description = "Manage dynamic plugins for Python applications" optional = false python-versions = ">=3.8" files = [ - {file = "stevedore-5.1.0-py3-none-any.whl", hash = "sha256:8cc040628f3cea5d7128f2e76cf486b2251a4e543c7b938f58d9a377f6694a2d"}, - {file = "stevedore-5.1.0.tar.gz", hash = "sha256:a54534acf9b89bc7ed264807013b505bf07f74dbe4bcfa37d32bd063870b087c"}, + {file = "stevedore-5.2.0-py3-none-any.whl", hash = "sha256:1c15d95766ca0569cad14cb6272d4d31dae66b011a929d7c18219c176ea1b5c9"}, + {file = "stevedore-5.2.0.tar.gz", hash = "sha256:46b93ca40e1114cea93d738a6c1e365396981bb6bb78c27045b7587c9473544d"}, ] [package.dependencies] @@ -4262,13 +4278,13 @@ docs = ["mkdocs (==1.2.4)", "mkdocs-material (==7.2.2)", "mkdocs-material-extens [[package]] name = "typing-extensions" -version = "4.9.0" +version = "4.10.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, - {file = 
"typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, + {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, + {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, ] [[package]] @@ -4372,6 +4388,20 @@ files = [ {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"}, ] +[[package]] +name = "wheel" +version = "0.42.0" +description = "A built-package format for Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "wheel-0.42.0-py3-none-any.whl", hash = "sha256:177f9c9b0d45c47873b619f5b650346d632cdc35fb5e4d25058e09c9e581433d"}, + {file = "wheel-0.42.0.tar.gz", hash = "sha256:c45be39f7882c9d34243236f2d63cbd58039e360f85d0913425fbd7ceea617a8"}, +] + +[package.extras] +test = ["pytest (>=6.0.0)", "setuptools (>=65)"] + [[package]] name = "wrapt" version = "1.16.0" From 3c2e27cfc059d4cd9a8dc10ca6a396a53cc14557 Mon Sep 17 00:00:00 2001 From: David Cates Date: Mon, 26 Feb 2024 16:26:14 -0700 Subject: [PATCH 135/225] update lock and toml files --- poetry.lock | 236 ++++++++++++++++++++++++++++--------------------- pyproject.toml | 2 +- 2 files changed, 134 insertions(+), 104 deletions(-) diff --git a/poetry.lock b/poetry.lock index d8276873..3eb6e194 100755 --- a/poetry.lock +++ b/poetry.lock @@ -112,6 +112,21 @@ six = ">=1.12.0" astroid = ["astroid (>=1,<2)", "astroid (>=2,<4)"] test = ["astroid (>=1,<2)", "astroid (>=2,<4)", "pytest"] +[[package]] +name = "astunparse" +version = "1.6.3" +description = "An AST unparser for Python" +optional = false +python-versions = "*" +files = [ + {file = "astunparse-1.6.3-py2.py3-none-any.whl", hash = "sha256:c2652417f2c8b5bb325c885ae329bdf3f86424075c4fd1a128674bc6fba4b8e8"}, + {file = "astunparse-1.6.3.tar.gz", hash = 
"sha256:5ad93a8456f0d084c3456d059fd9a92cce667963232cbf763eac3bc5b7940872"}, +] + +[package.dependencies] +six = ">=1.6.1,<2.0" +wheel = ">=0.23.0,<1.0" + [[package]] name = "async-timeout" version = "4.0.3" @@ -642,63 +657,63 @@ files = [ [[package]] name = "coverage" -version = "7.4.2" +version = "7.4.3" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.8" files = [ - {file = "coverage-7.4.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bf54c3e089179d9d23900e3efc86d46e4431188d9a657f345410eecdd0151f50"}, - {file = "coverage-7.4.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fe6e43c8b510719b48af7db9631b5fbac910ade4bd90e6378c85ac5ac706382c"}, - {file = "coverage-7.4.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b98c89db1b150d851a7840142d60d01d07677a18f0f46836e691c38134ed18b"}, - {file = "coverage-7.4.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c5f9683be6a5b19cd776ee4e2f2ffb411424819c69afab6b2db3a0a364ec6642"}, - {file = "coverage-7.4.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78cdcbf7b9cb83fe047ee09298e25b1cd1636824067166dc97ad0543b079d22f"}, - {file = "coverage-7.4.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:2599972b21911111114100d362aea9e70a88b258400672626efa2b9e2179609c"}, - {file = "coverage-7.4.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ef00d31b7569ed3cb2036f26565f1984b9fc08541731ce01012b02a4c238bf03"}, - {file = "coverage-7.4.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:20a875bfd8c282985c4720c32aa05056f77a68e6d8bbc5fe8632c5860ee0b49b"}, - {file = "coverage-7.4.2-cp310-cp310-win32.whl", hash = "sha256:b3f2b1eb229f23c82898eedfc3296137cf1f16bb145ceab3edfd17cbde273fb7"}, - {file = "coverage-7.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:7df95fdd1432a5d2675ce630fef5f239939e2b3610fe2f2b5bf21fa505256fa3"}, 
- {file = "coverage-7.4.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a8ddbd158e069dded57738ea69b9744525181e99974c899b39f75b2b29a624e2"}, - {file = "coverage-7.4.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:81a5fb41b0d24447a47543b749adc34d45a2cf77b48ca74e5bf3de60a7bd9edc"}, - {file = "coverage-7.4.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2412e98e70f16243be41d20836abd5f3f32edef07cbf8f407f1b6e1ceae783ac"}, - {file = "coverage-7.4.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ddb79414c15c6f03f56cc68fa06994f047cf20207c31b5dad3f6bab54a0f66ef"}, - {file = "coverage-7.4.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf89ab85027427d351f1de918aff4b43f4eb5f33aff6835ed30322a86ac29c9e"}, - {file = "coverage-7.4.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a178b7b1ac0f1530bb28d2e51f88c0bab3e5949835851a60dda80bff6052510c"}, - {file = "coverage-7.4.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:06fe398145a2e91edaf1ab4eee66149c6776c6b25b136f4a86fcbbb09512fd10"}, - {file = "coverage-7.4.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:18cac867950943fe93d6cd56a67eb7dcd2d4a781a40f4c1e25d6f1ed98721a55"}, - {file = "coverage-7.4.2-cp311-cp311-win32.whl", hash = "sha256:f72cdd2586f9a769570d4b5714a3837b3a59a53b096bb954f1811f6a0afad305"}, - {file = "coverage-7.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:d779a48fac416387dd5673fc5b2d6bd903ed903faaa3247dc1865c65eaa5a93e"}, - {file = "coverage-7.4.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:adbdfcda2469d188d79771d5696dc54fab98a16d2ef7e0875013b5f56a251047"}, - {file = "coverage-7.4.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ac4bab32f396b03ebecfcf2971668da9275b3bb5f81b3b6ba96622f4ef3f6e17"}, - {file = "coverage-7.4.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:006d220ba2e1a45f1de083d5022d4955abb0aedd78904cd5a779b955b019ec73"}, - {file = "coverage-7.4.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3733545eb294e5ad274abe131d1e7e7de4ba17a144505c12feca48803fea5f64"}, - {file = "coverage-7.4.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42a9e754aa250fe61f0f99986399cec086d7e7a01dd82fd863a20af34cbce962"}, - {file = "coverage-7.4.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:2ed37e16cf35c8d6e0b430254574b8edd242a367a1b1531bd1adc99c6a5e00fe"}, - {file = "coverage-7.4.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:b953275d4edfab6cc0ed7139fa773dfb89e81fee1569a932f6020ce7c6da0e8f"}, - {file = "coverage-7.4.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:32b4ab7e6c924f945cbae5392832e93e4ceb81483fd6dc4aa8fb1a97b9d3e0e1"}, - {file = "coverage-7.4.2-cp312-cp312-win32.whl", hash = "sha256:f5df76c58977bc35a49515b2fbba84a1d952ff0ec784a4070334dfbec28a2def"}, - {file = "coverage-7.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:34423abbaad70fea9d0164add189eabaea679068ebdf693baa5c02d03e7db244"}, - {file = "coverage-7.4.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5b11f9c6587668e495cc7365f85c93bed34c3a81f9f08b0920b87a89acc13469"}, - {file = "coverage-7.4.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:51593a1f05c39332f623d64d910445fdec3d2ac2d96b37ce7f331882d5678ddf"}, - {file = "coverage-7.4.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69f1665165ba2fe7614e2f0c1aed71e14d83510bf67e2ee13df467d1c08bf1e8"}, - {file = "coverage-7.4.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3c8bbb95a699c80a167478478efe5e09ad31680931ec280bf2087905e3b95ec"}, - {file = "coverage-7.4.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:175f56572f25e1e1201d2b3e07b71ca4d201bf0b9cb8fad3f1dfae6a4188de86"}, - {file = "coverage-7.4.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8562ca91e8c40864942615b1d0b12289d3e745e6b2da901d133f52f2d510a1e3"}, - {file = "coverage-7.4.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d9a1ef0f173e1a19738f154fb3644f90d0ada56fe6c9b422f992b04266c55d5a"}, - {file = "coverage-7.4.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f40ac873045db4fd98a6f40387d242bde2708a3f8167bd967ccd43ad46394ba2"}, - {file = "coverage-7.4.2-cp38-cp38-win32.whl", hash = "sha256:d1b750a8409bec61caa7824bfd64a8074b6d2d420433f64c161a8335796c7c6b"}, - {file = "coverage-7.4.2-cp38-cp38-win_amd64.whl", hash = "sha256:b4ae777bebaed89e3a7e80c4a03fac434a98a8abb5251b2a957d38fe3fd30088"}, - {file = "coverage-7.4.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3ff7f92ae5a456101ca8f48387fd3c56eb96353588e686286f50633a611afc95"}, - {file = "coverage-7.4.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:861d75402269ffda0b33af94694b8e0703563116b04c681b1832903fac8fd647"}, - {file = "coverage-7.4.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3507427d83fa961cbd73f11140f4a5ce84208d31756f7238d6257b2d3d868405"}, - {file = "coverage-7.4.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bf711d517e21fb5bc429f5c4308fbc430a8585ff2a43e88540264ae87871e36a"}, - {file = "coverage-7.4.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c00e54f0bd258ab25e7f731ca1d5144b0bf7bec0051abccd2bdcff65fa3262c9"}, - {file = "coverage-7.4.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f8e845d894e39fb53834da826078f6dc1a933b32b1478cf437007367efaf6f6a"}, - {file = "coverage-7.4.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:840456cb1067dc350af9080298c7c2cfdddcedc1cb1e0b30dceecdaf7be1a2d3"}, - {file = "coverage-7.4.2-cp39-cp39-musllinux_1_1_x86_64.whl", 
hash = "sha256:c11ca2df2206a4e3e4c4567f52594637392ed05d7c7fb73b4ea1c658ba560265"}, - {file = "coverage-7.4.2-cp39-cp39-win32.whl", hash = "sha256:3ff5bdb08d8938d336ce4088ca1a1e4b6c8cd3bef8bb3a4c0eb2f37406e49643"}, - {file = "coverage-7.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:ac9e95cefcf044c98d4e2c829cd0669918585755dd9a92e28a1a7012322d0a95"}, - {file = "coverage-7.4.2-pp38.pp39.pp310-none-any.whl", hash = "sha256:f593a4a90118d99014517c2679e04a4ef5aee2d81aa05c26c734d271065efcb6"}, - {file = "coverage-7.4.2.tar.gz", hash = "sha256:1a5ee18e3a8d766075ce9314ed1cb695414bae67df6a4b0805f5137d93d6f1cb"}, + {file = "coverage-7.4.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8580b827d4746d47294c0e0b92854c85a92c2227927433998f0d3320ae8a71b6"}, + {file = "coverage-7.4.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:718187eeb9849fc6cc23e0d9b092bc2348821c5e1a901c9f8975df0bc785bfd4"}, + {file = "coverage-7.4.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:767b35c3a246bcb55b8044fd3a43b8cd553dd1f9f2c1eeb87a302b1f8daa0524"}, + {file = "coverage-7.4.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae7f19afe0cce50039e2c782bff379c7e347cba335429678450b8fe81c4ef96d"}, + {file = "coverage-7.4.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba3a8aaed13770e970b3df46980cb068d1c24af1a1968b7818b69af8c4347efb"}, + {file = "coverage-7.4.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ee866acc0861caebb4f2ab79f0b94dbfbdbfadc19f82e6e9c93930f74e11d7a0"}, + {file = "coverage-7.4.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:506edb1dd49e13a2d4cac6a5173317b82a23c9d6e8df63efb4f0380de0fbccbc"}, + {file = "coverage-7.4.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd6545d97c98a192c5ac995d21c894b581f1fd14cf389be90724d21808b657e2"}, + {file = "coverage-7.4.3-cp310-cp310-win32.whl", hash = 
"sha256:f6a09b360d67e589236a44f0c39218a8efba2593b6abdccc300a8862cffc2f94"}, + {file = "coverage-7.4.3-cp310-cp310-win_amd64.whl", hash = "sha256:18d90523ce7553dd0b7e23cbb28865db23cddfd683a38fb224115f7826de78d0"}, + {file = "coverage-7.4.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cbbe5e739d45a52f3200a771c6d2c7acf89eb2524890a4a3aa1a7fa0695d2a47"}, + {file = "coverage-7.4.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:489763b2d037b164846ebac0cbd368b8a4ca56385c4090807ff9fad817de4113"}, + {file = "coverage-7.4.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:451f433ad901b3bb00184d83fd83d135fb682d780b38af7944c9faeecb1e0bfe"}, + {file = "coverage-7.4.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fcc66e222cf4c719fe7722a403888b1f5e1682d1679bd780e2b26c18bb648cdc"}, + {file = "coverage-7.4.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3ec74cfef2d985e145baae90d9b1b32f85e1741b04cd967aaf9cfa84c1334f3"}, + {file = "coverage-7.4.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:abbbd8093c5229c72d4c2926afaee0e6e3140de69d5dcd918b2921f2f0c8baba"}, + {file = "coverage-7.4.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:35eb581efdacf7b7422af677b92170da4ef34500467381e805944a3201df2079"}, + {file = "coverage-7.4.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8249b1c7334be8f8c3abcaaa996e1e4927b0e5a23b65f5bf6cfe3180d8ca7840"}, + {file = "coverage-7.4.3-cp311-cp311-win32.whl", hash = "sha256:cf30900aa1ba595312ae41978b95e256e419d8a823af79ce670835409fc02ad3"}, + {file = "coverage-7.4.3-cp311-cp311-win_amd64.whl", hash = "sha256:18c7320695c949de11a351742ee001849912fd57e62a706d83dfc1581897fa2e"}, + {file = "coverage-7.4.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b51bfc348925e92a9bd9b2e48dad13431b57011fd1038f08316e6bf1df107d10"}, + {file = 
"coverage-7.4.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d6cdecaedea1ea9e033d8adf6a0ab11107b49571bbb9737175444cea6eb72328"}, + {file = "coverage-7.4.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b2eccb883368f9e972e216c7b4c7c06cabda925b5f06dde0650281cb7666a30"}, + {file = "coverage-7.4.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6c00cdc8fa4e50e1cc1f941a7f2e3e0f26cb2a1233c9696f26963ff58445bac7"}, + {file = "coverage-7.4.3-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9a4a8dd3dcf4cbd3165737358e4d7dfbd9d59902ad11e3b15eebb6393b0446e"}, + {file = "coverage-7.4.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:062b0a75d9261e2f9c6d071753f7eef0fc9caf3a2c82d36d76667ba7b6470003"}, + {file = "coverage-7.4.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:ebe7c9e67a2d15fa97b77ea6571ce5e1e1f6b0db71d1d5e96f8d2bf134303c1d"}, + {file = "coverage-7.4.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c0a120238dd71c68484f02562f6d446d736adcc6ca0993712289b102705a9a3a"}, + {file = "coverage-7.4.3-cp312-cp312-win32.whl", hash = "sha256:37389611ba54fd6d278fde86eb2c013c8e50232e38f5c68235d09d0a3f8aa352"}, + {file = "coverage-7.4.3-cp312-cp312-win_amd64.whl", hash = "sha256:d25b937a5d9ffa857d41be042b4238dd61db888533b53bc76dc082cb5a15e914"}, + {file = "coverage-7.4.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:28ca2098939eabab044ad68850aac8f8db6bf0b29bc7f2887d05889b17346454"}, + {file = "coverage-7.4.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:280459f0a03cecbe8800786cdc23067a8fc64c0bd51dc614008d9c36e1659d7e"}, + {file = "coverage-7.4.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c0cdedd3500e0511eac1517bf560149764b7d8e65cb800d8bf1c63ebf39edd2"}, + {file = 
"coverage-7.4.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9a9babb9466fe1da12417a4aed923e90124a534736de6201794a3aea9d98484e"}, + {file = "coverage-7.4.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dec9de46a33cf2dd87a5254af095a409ea3bf952d85ad339751e7de6d962cde6"}, + {file = "coverage-7.4.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:16bae383a9cc5abab9bb05c10a3e5a52e0a788325dc9ba8499e821885928968c"}, + {file = "coverage-7.4.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:2c854ce44e1ee31bda4e318af1dbcfc929026d12c5ed030095ad98197eeeaed0"}, + {file = "coverage-7.4.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ce8c50520f57ec57aa21a63ea4f325c7b657386b3f02ccaedeccf9ebe27686e1"}, + {file = "coverage-7.4.3-cp38-cp38-win32.whl", hash = "sha256:708a3369dcf055c00ddeeaa2b20f0dd1ce664eeabde6623e516c5228b753654f"}, + {file = "coverage-7.4.3-cp38-cp38-win_amd64.whl", hash = "sha256:1bf25fbca0c8d121a3e92a2a0555c7e5bc981aee5c3fdaf4bb7809f410f696b9"}, + {file = "coverage-7.4.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3b253094dbe1b431d3a4ac2f053b6d7ede2664ac559705a704f621742e034f1f"}, + {file = "coverage-7.4.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:77fbfc5720cceac9c200054b9fab50cb2a7d79660609200ab83f5db96162d20c"}, + {file = "coverage-7.4.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6679060424faa9c11808598504c3ab472de4531c571ab2befa32f4971835788e"}, + {file = "coverage-7.4.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4af154d617c875b52651dd8dd17a31270c495082f3d55f6128e7629658d63765"}, + {file = "coverage-7.4.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8640f1fde5e1b8e3439fe482cdc2b0bb6c329f4bb161927c28d2e8879c6029ee"}, + {file = 
"coverage-7.4.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:69b9f6f66c0af29642e73a520b6fed25ff9fd69a25975ebe6acb297234eda501"}, + {file = "coverage-7.4.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:0842571634f39016a6c03e9d4aba502be652a6e4455fadb73cd3a3a49173e38f"}, + {file = "coverage-7.4.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a78ed23b08e8ab524551f52953a8a05d61c3a760781762aac49f8de6eede8c45"}, + {file = "coverage-7.4.3-cp39-cp39-win32.whl", hash = "sha256:c0524de3ff096e15fcbfe8f056fdb4ea0bf497d584454f344d59fce069d3e6e9"}, + {file = "coverage-7.4.3-cp39-cp39-win_amd64.whl", hash = "sha256:0209a6369ccce576b43bb227dc8322d8ef9e323d089c6f3f26a597b09cb4d2aa"}, + {file = "coverage-7.4.3-pp38.pp39.pp310-none-any.whl", hash = "sha256:7cbde573904625509a3f37b6fecea974e363460b556a627c60dc2f47e2fffa51"}, + {file = "coverage-7.4.3.tar.gz", hash = "sha256:276f6077a5c61447a48d133ed13e759c09e62aff0dc84274a68dc18660104d52"}, ] [package.extras] @@ -720,43 +735,43 @@ dev = ["polib"] [[package]] name = "cryptography" -version = "42.0.4" +version = "42.0.5" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
optional = false python-versions = ">=3.7" files = [ - {file = "cryptography-42.0.4-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:ffc73996c4fca3d2b6c1c8c12bfd3ad00def8621da24f547626bf06441400449"}, - {file = "cryptography-42.0.4-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:db4b65b02f59035037fde0998974d84244a64c3265bdef32a827ab9b63d61b18"}, - {file = "cryptography-42.0.4-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad9c385ba8ee025bb0d856714f71d7840020fe176ae0229de618f14dae7a6e2"}, - {file = "cryptography-42.0.4-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:69b22ab6506a3fe483d67d1ed878e1602bdd5912a134e6202c1ec672233241c1"}, - {file = "cryptography-42.0.4-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:e09469a2cec88fb7b078e16d4adec594414397e8879a4341c6ace96013463d5b"}, - {file = "cryptography-42.0.4-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3e970a2119507d0b104f0a8e281521ad28fc26f2820687b3436b8c9a5fcf20d1"}, - {file = "cryptography-42.0.4-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:e53dc41cda40b248ebc40b83b31516487f7db95ab8ceac1f042626bc43a2f992"}, - {file = "cryptography-42.0.4-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:c3a5cbc620e1e17009f30dd34cb0d85c987afd21c41a74352d1719be33380885"}, - {file = "cryptography-42.0.4-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:6bfadd884e7280df24d26f2186e4e07556a05d37393b0f220a840b083dc6a824"}, - {file = "cryptography-42.0.4-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:01911714117642a3f1792c7f376db572aadadbafcd8d75bb527166009c9f1d1b"}, - {file = "cryptography-42.0.4-cp37-abi3-win32.whl", hash = "sha256:fb0cef872d8193e487fc6bdb08559c3aa41b659a7d9be48b2e10747f47863925"}, - {file = "cryptography-42.0.4-cp37-abi3-win_amd64.whl", hash = "sha256:c1f25b252d2c87088abc8bbc4f1ecbf7c919e05508a7e8628e6875c40bc70923"}, - {file = "cryptography-42.0.4-cp39-abi3-macosx_10_12_universal2.whl", hash = 
"sha256:15a1fb843c48b4a604663fa30af60818cd28f895572386e5f9b8a665874c26e7"}, - {file = "cryptography-42.0.4-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1327f280c824ff7885bdeef8578f74690e9079267c1c8bd7dc5cc5aa065ae52"}, - {file = "cryptography-42.0.4-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ffb03d419edcab93b4b19c22ee80c007fb2d708429cecebf1dd3258956a563a"}, - {file = "cryptography-42.0.4-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:1df6fcbf60560d2113b5ed90f072dc0b108d64750d4cbd46a21ec882c7aefce9"}, - {file = "cryptography-42.0.4-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:44a64043f743485925d3bcac548d05df0f9bb445c5fcca6681889c7c3ab12764"}, - {file = "cryptography-42.0.4-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:3c6048f217533d89f2f8f4f0fe3044bf0b2090453b7b73d0b77db47b80af8dff"}, - {file = "cryptography-42.0.4-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:6d0fbe73728c44ca3a241eff9aefe6496ab2656d6e7a4ea2459865f2e8613257"}, - {file = "cryptography-42.0.4-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:887623fe0d70f48ab3f5e4dbf234986b1329a64c066d719432d0698522749929"}, - {file = "cryptography-42.0.4-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:ce8613beaffc7c14f091497346ef117c1798c202b01153a8cc7b8e2ebaaf41c0"}, - {file = "cryptography-42.0.4-cp39-abi3-win32.whl", hash = "sha256:810bcf151caefc03e51a3d61e53335cd5c7316c0a105cc695f0959f2c638b129"}, - {file = "cryptography-42.0.4-cp39-abi3-win_amd64.whl", hash = "sha256:a0298bdc6e98ca21382afe914c642620370ce0470a01e1bef6dd9b5354c36854"}, - {file = "cryptography-42.0.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5f8907fcf57392cd917892ae83708761c6ff3c37a8e835d7246ff0ad251d9298"}, - {file = "cryptography-42.0.4-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:12d341bd42cdb7d4937b0cabbdf2a94f949413ac4504904d0cdbdce4a22cbf88"}, - {file = 
"cryptography-42.0.4-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1cdcdbd117681c88d717437ada72bdd5be9de117f96e3f4d50dab3f59fd9ab20"}, - {file = "cryptography-42.0.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:0e89f7b84f421c56e7ff69f11c441ebda73b8a8e6488d322ef71746224c20fce"}, - {file = "cryptography-42.0.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f1e85a178384bf19e36779d91ff35c7617c885da487d689b05c1366f9933ad74"}, - {file = "cryptography-42.0.4-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:d2a27aca5597c8a71abbe10209184e1a8e91c1fd470b5070a2ea60cafec35bcd"}, - {file = "cryptography-42.0.4-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:4e36685cb634af55e0677d435d425043967ac2f3790ec652b2b88ad03b85c27b"}, - {file = "cryptography-42.0.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:f47be41843200f7faec0683ad751e5ef11b9a56a220d57f300376cd8aba81660"}, - {file = "cryptography-42.0.4.tar.gz", hash = "sha256:831a4b37accef30cccd34fcb916a5d7b5be3cbbe27268a02832c3e450aea39cb"}, + {file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:a30596bae9403a342c978fb47d9b0ee277699fa53bbafad14706af51fe543d16"}, + {file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:b7ffe927ee6531c78f81aa17e684e2ff617daeba7f189f911065b2ea2d526dec"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2424ff4c4ac7f6b8177b53c17ed5d8fa74ae5955656867f5a8affaca36a27abb"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:329906dcc7b20ff3cad13c069a78124ed8247adcac44b10bea1130e36caae0b4"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:b03c2ae5d2f0fc05f9a2c0c997e1bc18c8229f392234e8a0194f202169ccd278"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_x86_64.whl", hash = 
"sha256:f8837fe1d6ac4a8052a9a8ddab256bc006242696f03368a4009be7ee3075cdb7"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:0270572b8bd2c833c3981724b8ee9747b3ec96f699a9665470018594301439ee"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:b8cac287fafc4ad485b8a9b67d0ee80c66bf3574f655d3b97ef2e1082360faf1"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:16a48c23a62a2f4a285699dba2e4ff2d1cff3115b9df052cdd976a18856d8e3d"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:2bce03af1ce5a5567ab89bd90d11e7bbdff56b8af3acbbec1faded8f44cb06da"}, + {file = "cryptography-42.0.5-cp37-abi3-win32.whl", hash = "sha256:b6cd2203306b63e41acdf39aa93b86fb566049aeb6dc489b70e34bcd07adca74"}, + {file = "cryptography-42.0.5-cp37-abi3-win_amd64.whl", hash = "sha256:98d8dc6d012b82287f2c3d26ce1d2dd130ec200c8679b6213b3c73c08b2b7940"}, + {file = "cryptography-42.0.5-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:5e6275c09d2badf57aea3afa80d975444f4be8d3bc58f7f80d2a484c6f9485c8"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4985a790f921508f36f81831817cbc03b102d643b5fcb81cd33df3fa291a1a1"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7cde5f38e614f55e28d831754e8a3bacf9ace5d1566235e39d91b35502d6936e"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:7367d7b2eca6513681127ebad53b2582911d1736dc2ffc19f2c3ae49997496bc"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:cd2030f6650c089aeb304cf093f3244d34745ce0cfcc39f20c6fbfe030102e2a"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a2913c5375154b6ef2e91c10b5720ea6e21007412f6437504ffea2109b5a33d7"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_x86_64.whl", hash = 
"sha256:c41fb5e6a5fe9ebcd58ca3abfeb51dffb5d83d6775405305bfa8715b76521922"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:3eaafe47ec0d0ffcc9349e1708be2aaea4c6dd4978d76bf6eb0cb2c13636c6fc"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:1b95b98b0d2af784078fa69f637135e3c317091b615cd0905f8b8a087e86fa30"}, + {file = "cryptography-42.0.5-cp39-abi3-win32.whl", hash = "sha256:1f71c10d1e88467126f0efd484bd44bca5e14c664ec2ede64c32f20875c0d413"}, + {file = "cryptography-42.0.5-cp39-abi3-win_amd64.whl", hash = "sha256:a011a644f6d7d03736214d38832e030d8268bcff4a41f728e6030325fea3e400"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9481ffe3cf013b71b2428b905c4f7a9a4f76ec03065b05ff499bb5682a8d9ad8"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:ba334e6e4b1d92442b75ddacc615c5476d4ad55cc29b15d590cc6b86efa487e2"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:ba3e4a42397c25b7ff88cdec6e2a16c2be18720f317506ee25210f6d31925f9c"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:111a0d8553afcf8eb02a4fea6ca4f59d48ddb34497aa8706a6cf536f1a5ec576"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:cd65d75953847815962c84a4654a84850b2bb4aed3f26fadcc1c13892e1e29f6"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e807b3188f9eb0eaa7bbb579b462c5ace579f1cedb28107ce8b48a9f7ad3679e"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f12764b8fffc7a123f641d7d049d382b73f96a34117e0b637b80643169cec8ac"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:37dd623507659e08be98eec89323469e8c7b4c1407c85112634ae3dbdb926fdd"}, + {file = "cryptography-42.0.5.tar.gz", hash = 
"sha256:6fe07eec95dfd477eb9530aef5bead34fec819b3aaf6c5bd6d20565da607bfe1"}, ] [package.dependencies] @@ -1472,16 +1487,17 @@ six = ">=1.12" [[package]] name = "griffe" -version = "0.40.1" +version = "0.41.0" description = "Signatures for entire Python programs. Extract the structure, the frame, the skeleton of your project, to generate API documentation or find breaking changes in your API." optional = false python-versions = ">=3.8" files = [ - {file = "griffe-0.40.1-py3-none-any.whl", hash = "sha256:5b8c023f366fe273e762131fe4bfd141ea56c09b3cb825aa92d06a82681cfd93"}, - {file = "griffe-0.40.1.tar.gz", hash = "sha256:66c48a62e2ce5784b6940e603300fcfb807b6f099b94e7f753f1841661fd5c7c"}, + {file = "griffe-0.41.0-py3-none-any.whl", hash = "sha256:8aa7fc6eb00cb80af9c0198178c6b7110cb59fa2c5187bb13ea25eebbe4dd928"}, + {file = "griffe-0.41.0.tar.gz", hash = "sha256:850128c3198c18713eaf0a6cc8572e590a16b1965f72a4e871e66cf84740903f"}, ] [package.dependencies] +astunparse = {version = ">=1.6", markers = "python_version < \"3.9\""} colorama = ">=0.4" [[package]] @@ -3934,19 +3950,19 @@ paramiko = "*" [[package]] name = "setuptools" -version = "69.1.0" +version = "69.1.1" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.1.0-py3-none-any.whl", hash = "sha256:c054629b81b946d63a9c6e732bc8b2513a7c3ea645f11d0139a2191d735c60c6"}, - {file = "setuptools-69.1.0.tar.gz", hash = "sha256:850894c4195f09c4ed30dba56213bf7c3f21d86ed6bdaafb5df5972593bfc401"}, + {file = "setuptools-69.1.1-py3-none-any.whl", hash = "sha256:02fa291a0471b3a18b2b2481ed902af520c69e8ae0919c13da936542754b4c56"}, + {file = "setuptools-69.1.1.tar.gz", hash = "sha256:5c0806c7d9af348e6dd3777b4f4dbb42c7ad85b190104837488eab9a7c945cf8"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx 
(>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "singledispatch" @@ -3987,13 +4003,13 @@ files = [ [[package]] name = "sniffio" -version = "1.3.0" +version = "1.3.1" description = "Sniff out which async library your code is running under" optional = false python-versions = ">=3.7" files = [ - {file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"}, - {file = "sniffio-1.3.0.tar.gz", hash = 
"sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"}, + {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, + {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, ] [[package]] @@ -4074,13 +4090,13 @@ tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"] [[package]] name = "stevedore" -version = "5.1.0" +version = "5.2.0" description = "Manage dynamic plugins for Python applications" optional = false python-versions = ">=3.8" files = [ - {file = "stevedore-5.1.0-py3-none-any.whl", hash = "sha256:8cc040628f3cea5d7128f2e76cf486b2251a4e543c7b938f58d9a377f6694a2d"}, - {file = "stevedore-5.1.0.tar.gz", hash = "sha256:a54534acf9b89bc7ed264807013b505bf07f74dbe4bcfa37d32bd063870b087c"}, + {file = "stevedore-5.2.0-py3-none-any.whl", hash = "sha256:1c15d95766ca0569cad14cb6272d4d31dae66b011a929d7c18219c176ea1b5c9"}, + {file = "stevedore-5.2.0.tar.gz", hash = "sha256:46b93ca40e1114cea93d738a6c1e365396981bb6bb78c27045b7587c9473544d"}, ] [package.dependencies] @@ -4262,13 +4278,13 @@ docs = ["mkdocs (==1.2.4)", "mkdocs-material (==7.2.2)", "mkdocs-material-extens [[package]] name = "typing-extensions" -version = "4.9.0" +version = "4.10.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, - {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, + {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, + {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, ] [[package]] @@ -4372,6 
+4388,20 @@ files = [ {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"}, ] +[[package]] +name = "wheel" +version = "0.42.0" +description = "A built-package format for Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "wheel-0.42.0-py3-none-any.whl", hash = "sha256:177f9c9b0d45c47873b619f5b650346d632cdc35fb5e4d25058e09c9e581433d"}, + {file = "wheel-0.42.0.tar.gz", hash = "sha256:c45be39f7882c9d34243236f2d63cbd58039e360f85d0913425fbd7ceea617a8"}, +] + +[package.extras] +test = ["pytest (>=6.0.0)", "setuptools (>=65)"] + [[package]] name = "wrapt" version = "1.16.0" diff --git a/pyproject.toml b/pyproject.toml index 4bed9f36..bf7e6998 100755 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "nautobot-device-onboarding" -version = "3.0.2a4" +version = "3.0.2a5" description = "A app for Nautobot to easily onboard new devices." authors = ["Network to Code, LLC "] license = "Apache-2.0" From 16648c43ebdd4b5d7f8b93e303822cc58ad644cb Mon Sep 17 00:00:00 2001 From: David Cates Date: Wed, 28 Feb 2024 12:27:21 -0700 Subject: [PATCH 136/225] update logging around existing devies --- .../diffsync/models/onboarding_models.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/nautobot_device_onboarding/diffsync/models/onboarding_models.py b/nautobot_device_onboarding/diffsync/models/onboarding_models.py index 0484a18f..2d7b3f10 100644 --- a/nautobot_device_onboarding/diffsync/models/onboarding_models.py +++ b/nautobot_device_onboarding/diffsync/models/onboarding_models.py @@ -65,27 +65,29 @@ def _get_or_create_device(cls, diffsync, ids, attrs): ) platform = Platform.objects.get(name=attrs["platform__name"]) device = Device.objects.get(name=ids["name"], location=location) - update_devices_without_primary_ip = location = diffsync_utils.retrieve_submitted_value( + update_devices_without_primary_ip = diffsync_utils.retrieve_submitted_value( 
job=diffsync.job, ip_address=attrs["primary_ip4__host"], query_string="update_devices_without_primary_ip", ) if update_devices_without_primary_ip: diffsync.job.logger.warning( - f"Device {ids['name']} at location {location} already exists in Nautobot " + f"Device {device.name} at location {location.name} already exists in Nautobot " "but the primary ip address either does not exist, or doesn't match an entered ip address. " "This device will be updated." ) device = cls._update_device_with_attrs(device, platform, ids, attrs, diffsync) else: diffsync.job.logger.warning( - f"Device {ids['name']} at location {location} already exists in Nautobot " + f"Device {device.name} at location {location.name} already exists in Nautobot " "but the primary ip address either does not exist, or doesn't match an entered ip address. " "IP Address, this device will be skipped." ) return None except ObjectDoesNotExist: + print(ids["name"], location) + print("does not exist") # Create Device device = Device( location=location, @@ -135,7 +137,7 @@ def _get_or_create_interface(cls, diffsync, device, attrs): @classmethod def _update_device_with_attrs(cls, device, platform, ids, attrs, diffsync): """Update a Nautobot device instance.""" - device.location =diffsync_utils.retrieve_submitted_value( + device.location = diffsync_utils.retrieve_submitted_value( job=diffsync.job, ip_address=attrs["primary_ip4__host"], query_string="location", ) device.status = diffsync_utils.retrieve_submitted_value( From 3d2e7d6ba7fa985e1ef8006f13b19532c04d3e9e Mon Sep 17 00:00:00 2001 From: Jeff Kala Date: Wed, 28 Feb 2024 13:51:51 -0600 Subject: [PATCH 137/225] change command mapper formatting and DO for junos --- .../command_mappers/arista_eos.yml | 43 +++++----- .../command_mappers/cisco_ios.yml | 32 +++++-- .../command_mappers/cisco_nxos.yml | 22 +++-- .../command_mappers/juniper_junos.yml | 29 +++++-- .../nornir_plays/command_getter.py | 15 ++-- nautobot_device_onboarding/utils/formatter.py | 83 
++++--------------- 6 files changed, 104 insertions(+), 120 deletions(-) diff --git a/nautobot_device_onboarding/command_mappers/arista_eos.yml b/nautobot_device_onboarding/command_mappers/arista_eos.yml index a2b850da..d4fd406b 100755 --- a/nautobot_device_onboarding/command_mappers/arista_eos.yml +++ b/nautobot_device_onboarding/command_mappers/arista_eos.yml @@ -1,30 +1,27 @@ --- device_onboarding: - use_textfsm: false hostname: - # command: "show hostname" - # jpath: "[*].hostname" - command: "show hostname | json" - jpath: "'hostname'" + command: + command: "show hostname" + use_textfsm: true + jpath: "[*].hostname" serial: - # command: "show version" - # jpath: "[*].serial_number" - command: "show version | json" - jpath: "'serialNumber'" + command: + command: "show version" + use_textfsm: true + jpath: "[*].serial_number" device_type: - # command: "show version" - # jpath: "[*].model" - command: "show version | json" - jpath: "'modelName'" + command: + command: "show version" + use_textfsm: true + jpath: "[*].model" mgmt_interface: - # command: "show ip interface brief" - # jpath: "[?interface=='Management1'].interface" - command: "show ip interface brief | json" - #jpath: "[?interface=='Management1'].interface" - jpath: "[*].[$interface$,link_status,protocol_status,hardware_type,mac_address,ip_address,prefix_length,mtu]" + command: + command: "show ip interface brief | json" + use_textfsm: false + jpath: "interfaces.*.{name: name, match_ip: interfaceAddress.ipAddr.address=='{{ obj }}'}[? 
match_ip].name" mask_length: - # command: "show ip interface brief" - # jpath: "[?interface=='Management1'].ip_address" - command: "show ip interface brief" - jpath: "$interfaces$.*.interfaceAddress[?ipAddr.address=='{{ obj }}']" - # post_processor: "{{ obj[0] | ipaddress_interface('netmask') | netmask_to_cidr }}" + command: + command: "show ip interface brief | json" + use_textfsm: false + jpath: "interfaces.*.interfaceAddress[?ipAddr.address=='{{ obj }}'].ipAddr.maskLen" diff --git a/nautobot_device_onboarding/command_mappers/cisco_ios.yml b/nautobot_device_onboarding/command_mappers/cisco_ios.yml index 8497f36d..8c351613 100755 --- a/nautobot_device_onboarding/command_mappers/cisco_ios.yml +++ b/nautobot_device_onboarding/command_mappers/cisco_ios.yml @@ -1,30 +1,44 @@ --- device_onboarding: - use_textfsm: true + # use_textfsm: true hostname: jpath: "[*].hostname" - command: "show version" + command: + command: "show version" + use_textfsm: true post_processor: "{{ obj[0] | upper }}" serial: - command: "show version" + command: + command: "show version" + use_textfsm: true jpath: "[*].serial[0]" device_type: - command: "show version" + command: + command: "show version" + use_textfsm: true jpath: "[*].hardware[0]" mgmt_interface: - command: "show interfaces" + command: + command: "show interfaces" + use_textfsm: true jpath: "[?ip_address=='{{ obj }}'].{name: interface, enabled: link_status}" post_processor: "{% for interface in obj %}{% if 'up' in interface['enabled'] %}{{ interface.name }}{% endif %}{% endfor %}" mask_length: - command: "show interfaces" + command: + command: "show interfaces" + use_textfsm: true jpath: "[?ip_address=='{{ obj }}'].prefix_length" post_processor: "{{ obj | unique | first }}" network_importer: - use_textfsm: true + # use_textfsm: true serial: - command: "show version" + command: + command: "show version" + use_textfsm: true jpath: "[*].serial[0]" interfaces: - command: "show interfaces" + command: + command: "show interfaces" + 
use_textfsm: true jpath: "[*].[$interface$,hardware_type,ip_address,prefix_length,mac_address,mtu,description,link_status,vlan_id,vlan_id_inner,vlan_id_outer]" post_processor: "{{ obj | fix_interfaces | tojson }}" diff --git a/nautobot_device_onboarding/command_mappers/cisco_nxos.yml b/nautobot_device_onboarding/command_mappers/cisco_nxos.yml index 17ed88a8..3ee55466 100755 --- a/nautobot_device_onboarding/command_mappers/cisco_nxos.yml +++ b/nautobot_device_onboarding/command_mappers/cisco_nxos.yml @@ -1,18 +1,28 @@ --- device_onboarding: - use_textfsm: true + # use_textfsm: true hostname: - command: "show version" + command: + command: "show version" + use_textfsm: true jpath: "[*].hostname" serial: - command: "show version" + command: + command: "show version" + use_textfsm: true jpath: "[*].serial" device_type: - command: "show version" + command: + command: "show version" + use_textfsm: true jpath: "[*].platform" mgmt_interface: - command: "show interface" + command: + command: "show interface" + use_textfsm: true jpath: "[?ip_address=='{{ obj }}'].interface || [`mgmt0`]" mask_length: - command: "show interface" + command: + command: "show interface" + use_textfsm: true jpath: "[?ip_address=='{{ obj }}'].prefix_length || [`31`]" diff --git a/nautobot_device_onboarding/command_mappers/juniper_junos.yml b/nautobot_device_onboarding/command_mappers/juniper_junos.yml index 8f24913c..7760c797 100755 --- a/nautobot_device_onboarding/command_mappers/juniper_junos.yml +++ b/nautobot_device_onboarding/command_mappers/juniper_junos.yml @@ -1,19 +1,30 @@ --- device_onboarding: - use_textfsm: true hostname: - command: "show version" + command: + command: "show version" + use_textfsm: true jpath: "[*].hostname" serial: - command: "show version" + command: + command: "show version" + use_textfsm: true jpath: "[*].serial_number" device_type: - command: "show chassis hardware" + command: + command: "show chassis hardware" + use_textfsm: true jpath: "[*].model" mgmt_interface: 
- command: "show interfaces" - jpath: "[?local=='{{ obj }}'].interface" + command: + command: "show configuration interfaces | display json" + use_textfsm: false + # jpath: '"interface-information"[]."physical-interface"[]."logical-interface"[].{name: name[].data, ip: "address-family"[]."interface-address"[]."ifa-local"[][data=="{{ obj }}"][]}' + jpath: "*.interfaces.interface[].{name: name, ip: unit[].family[].*.address[][].name}" + post_processor: "{% for entry in obj %}{% if entry['ip'] and original_host in entry['ip'] %}{{ entry['name'] }}{% endif %}{% endfor %}" mask_length: - command: "show interfaces" - jpath: "[?local=='{{ obj }}'].destination" - post_processor: "{{ obj[0] | ipaddress_interface('netmask') | netmask_to_cidr }}" + command: + command: "show configuration interfaces | display json" + use_textfsm: false + jpath: "*.interfaces.interface[].{name: name, ip: unit[].family[].*.address[][].name}" + post_processor: "{% for entry in obj %}{% if entry['ip'] and original_host in entry['ip'] %}{{ entry['ip'] | ipaddress_interface('netmask') | netmask_to_cidr }}{% endif %}{% endfor %}" diff --git a/nautobot_device_onboarding/nornir_plays/command_getter.py b/nautobot_device_onboarding/nornir_plays/command_getter.py index 3ce57956..c783073e 100755 --- a/nautobot_device_onboarding/nornir_plays/command_getter.py +++ b/nautobot_device_onboarding/nornir_plays/command_getter.py @@ -23,11 +23,11 @@ def _get_commands_to_run(yaml_parsed_info, command_getter_job): """Load yaml file and look up all commands that need to be run.""" commands = [] - for key, value in yaml_parsed_info[command_getter_job].items(): - if not key == "use_textfsm": + for _, value in yaml_parsed_info[command_getter_job].items(): + # Deduplicate commands + parser key + if value["command"] not in commands: commands.append(value["command"]) - print(f"COMMANDS: {commands}") - return list(set(commands)) + return commands def netmiko_send_commands(task: Task, command_getter_job: str): @@ -38,12 +38,11 
@@ def netmiko_send_commands(task: Task, command_getter_job: str): return Result(host=task.host, result=f"{task.host.name} has a unsupported platform set.", failed=True) commands = _get_commands_to_run(task.host.data["platform_parsing_info"], command_getter_job) for command in commands: - command_use_textfsm = task.host.data["platform_parsing_info"][command_getter_job]["use_textfsm"] task.run( task=netmiko_send_command, - name=command, - command_string=command, - use_textfsm=command_use_textfsm, + name=command["command"], + command_string=command["command"], + use_textfsm=command["use_textfsm"], read_timeout=60, ) diff --git a/nautobot_device_onboarding/utils/formatter.py b/nautobot_device_onboarding/utils/formatter.py index eab8c629..156e5c60 100755 --- a/nautobot_device_onboarding/utils/formatter.py +++ b/nautobot_device_onboarding/utils/formatter.py @@ -1,18 +1,13 @@ """Formatter.""" import os - +import json import yaml from django.template import engines from django.utils.module_loading import import_string from jdiff import extract_data_from_json -# from jinja2 import exceptions as jinja_errors from jinja2.sandbox import SandboxedEnvironment - -# from nautobot.core.utils.data import render_jinja2 - -# from nautobot_device_onboarding.exceptions import OnboardException from nautobot_device_onboarding.utils.jinja_filters import fix_interfaces DATA_DIR = os.path.abspath(os.path.join(os.path.dirname(os.path.dirname(__file__)), "command_mappers")) @@ -38,47 +33,6 @@ def get_django_env(): return jinja_env -# def render_jinja_template(obj, template): -# """ -# Helper function to render Jinja templates. - -# Args: -# obj (Device): The Device object from Nautobot. -# template (str): A Jinja2 template to be rendered. - -# Returns: -# str: The ``template`` rendered. - -# Raises: -# NornirNautobotException: When there is an error rendering the ``template``. 
-# """ -# try: -# return render_jinja2(template_code=template, context={"obj": obj}) -# except jinja_errors.UndefinedError as error: -# error_msg = ( -# "`E3019:` Jinja encountered and UndefinedError`, check the template for missing variable definitions.\n" -# f"Template:\n{template}\n" -# f"Original Error: {error}" -# ) -# raise OnboardException(error_msg) from error - -# except jinja_errors.TemplateSyntaxError as error: # Also catches subclass of TemplateAssertionError -# error_msg = ( -# f"`E3020:` Jinja encountered a SyntaxError at line number {error.lineno}," -# f"check the template for invalid Jinja syntax.\nTemplate:\n{template}\n" -# f"Original Error: {error}" -# ) -# raise OnboardException(error_msg) from error -# # Intentionally not catching TemplateNotFound errors since template is passes as a string and not a filename -# except jinja_errors.TemplateError as error: # Catches all remaining Jinja errors -# error_msg = ( -# "`E3021:` Jinja encountered an unexpected TemplateError; check the template for correctness\n" -# f"Template:\n{template}\n" -# f"Original Error: {error}" -# ) -# raise OnboardException(error_msg) from error - - def load_yaml_datafile(filename): """Get the contents of the given YAML data file. 
@@ -109,23 +63,22 @@ def extract_show_data(host, multi_result, command_getter_type): result_dict = {} for default_dict_field, command_info in command_jpaths[command_getter_type].items(): - if not default_dict_field == "use_textfsm": - if command_info["command"] == multi_result[0].name: - jpath_template = jinja_env.from_string(command_info["jpath"]) - j2_rendered_jpath = jpath_template.render({"obj": host.name}) - # j2_rendered_jpath = render_jinja_template(obj=host.name, template=command_info["jpath"]) + if command_info["command"]["command"] == multi_result[0].name: + jpath_template = jinja_env.from_string(command_info["jpath"]) + j2_rendered_jpath = jpath_template.render({"obj": host.name}) + print(j2_rendered_jpath) + print(type(multi_result[0].result)) + if isinstance(multi_result[0].result, str): + extracted_value = extract_data_from_json(json.loads(multi_result[0].result), j2_rendered_jpath) + else: extracted_value = extract_data_from_json(multi_result[0].result, j2_rendered_jpath) - # print(extracted_value) - if command_info.get("post_processor"): - template = jinja_env.from_string(command_info["post_processor"]) - extracted_processed = template.render({"obj": extracted_value}) - # extracted_processed = render_jinja_template( - # obj=extracted_value, template=command_info["post_processor"] - # ) - # print(extracted_processed) - else: - extracted_processed = extracted_value - if isinstance(extracted_value, list) and len(extracted_value) == 1: - extracted_processed = extracted_value[0] - result_dict[default_dict_field] = extracted_processed + print(extracted_value) + if command_info.get("post_processor"): + template = jinja_env.from_string(command_info["post_processor"]) + extracted_processed = template.render({"obj": extracted_value, "original_host": host.name}) + else: + extracted_processed = extracted_value + if isinstance(extracted_value, list) and len(extracted_value) == 1: + extracted_processed = extracted_value[0] + result_dict[default_dict_field] = 
extracted_processed return result_dict From 7d6d3b1db910f09bf509c6b98ae9823108c3f7bb Mon Sep 17 00:00:00 2001 From: Jeff Kala Date: Wed, 28 Feb 2024 15:54:46 -0600 Subject: [PATCH 138/225] fix junos --- .../command_mappers/juniper_junos.yml | 23 ++++++++++--------- 1 file changed, 12 insertions(+), 11 deletions(-) diff --git a/nautobot_device_onboarding/command_mappers/juniper_junos.yml b/nautobot_device_onboarding/command_mappers/juniper_junos.yml index 7760c797..89d5ab80 100755 --- a/nautobot_device_onboarding/command_mappers/juniper_junos.yml +++ b/nautobot_device_onboarding/command_mappers/juniper_junos.yml @@ -2,29 +2,30 @@ device_onboarding: hostname: command: - command: "show version" - use_textfsm: true - jpath: "[*].hostname" + command: "show version | display json" + use_textfsm: false + jpath: '"software-information"[]."host-name"[].data' serial: command: - command: "show version" - use_textfsm: true - jpath: "[*].serial_number" + command: "show chassis hardware | display json" + use_textfsm: false + jpath: '"chassis-inventory"[]."chassis"[]."serial-number"[].data' device_type: command: - command: "show chassis hardware" - use_textfsm: true - jpath: "[*].model" + command: "show chassis hardware | display json" + use_textfsm: false + jpath: '"chassis-inventory"[]."chassis"[]."description"[].data' mgmt_interface: command: command: "show configuration interfaces | display json" use_textfsm: false # jpath: '"interface-information"[]."physical-interface"[]."logical-interface"[].{name: name[].data, ip: "address-family"[]."interface-address"[]."ifa-local"[][data=="{{ obj }}"][]}' jpath: "*.interfaces.interface[].{name: name, ip: unit[].family[].*.address[][].name}" - post_processor: "{% for entry in obj %}{% if entry['ip'] and original_host in entry['ip'] %}{{ entry['name'] }}{% endif %}{% endfor %}" + post_processor: "{% for entry in obj %}{% if entry['ip'] is not none %}{% for ipaddr in entry['ip'] %}{% if original_host in ipaddr %}{{ entry['name'] }}{% endif 
%}{% endfor %}{% endif %}{% endfor %}" mask_length: command: command: "show configuration interfaces | display json" use_textfsm: false jpath: "*.interfaces.interface[].{name: name, ip: unit[].family[].*.address[][].name}" - post_processor: "{% for entry in obj %}{% if entry['ip'] and original_host in entry['ip'] %}{{ entry['ip'] | ipaddress_interface('netmask') | netmask_to_cidr }}{% endif %}{% endfor %}" + # post_processor: "{% for entry in obj %}{% if entry['ip'] and original_host in entry['ip'] %}{{ entry['ip'] | ipaddress_interface('netmask') | netmask_to_cidr }}{% endif %}{% endfor %}" + post_processor: "{% for entry in obj %}{% if entry['ip'] is not none %}{% for ipaddr in entry['ip'] %}{% if ipaddr.split('/')[0] == original_host %}{{ ipaddr.split('/')[1] }}{% endif %}{% endfor %}{% endif %}{% endfor %}" From 277ad0169f3c451f6bad106aa72acb2a047d2fa8 Mon Sep 17 00:00:00 2001 From: David Cates Date: Wed, 28 Feb 2024 15:18:50 -0700 Subject: [PATCH 139/225] remove prefix_length from OnboardingDeviceModel --- .../diffsync/models/onboarding_models.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/nautobot_device_onboarding/diffsync/models/onboarding_models.py b/nautobot_device_onboarding/diffsync/models/onboarding_models.py index 2d7b3f10..9c80a237 100644 --- a/nautobot_device_onboarding/diffsync/models/onboarding_models.py +++ b/nautobot_device_onboarding/diffsync/models/onboarding_models.py @@ -26,7 +26,6 @@ class OnboardingDevice(DiffSyncModel): "mask_length", "primary_ip4__host", "primary_ip4__status__name", - "prefix_length", "platform__name", "role__name", "secrets_group__name", @@ -42,7 +41,6 @@ class OnboardingDevice(DiffSyncModel): mask_length: Optional[int] primary_ip4__host: Optional[str] primary_ip4__status__name: Optional[str] - prefix_length: Optional[int] platform__name: Optional[str] role__name: Optional[str] secrets_group__name: Optional[str] @@ -86,8 +84,6 @@ def _get_or_create_device(cls, diffsync, ids, attrs): return None except 
ObjectDoesNotExist: - print(ids["name"], location) - print("does not exist") # Create Device device = Device( location=location, From 2ee7cfb65d47361a068ddbc71e90dd9ae30b5a16 Mon Sep 17 00:00:00 2001 From: David Cates Date: Thu, 29 Feb 2024 12:45:21 -0700 Subject: [PATCH 140/225] add data check for fields returned by devices --- .../diffsync/adapters/onboarding_adapters.py | 35 +++++++++++++++---- 1 file changed, 29 insertions(+), 6 deletions(-) diff --git a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py index dcdb1fb8..acf8a5a0 100644 --- a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py @@ -140,6 +140,7 @@ class OnboardingNetworkAdapter(diffsync.DiffSync): """Adapter for loading device data from a network.""" device_data = None + failed_ip_addresses = [] manufacturer = onboarding_models.OnboardingManufacturer platform = onboarding_models.OnboardingPlatform @@ -183,11 +184,9 @@ def _handle_failed_devices(self, device_data): f"{ip_address}: Connection or data error, this device will not be synced. 
" f"{device_data[ip_address].get('failed_reason')}" ) - failed_ip_addresses.append(ip_address) - for ip_address in failed_ip_addresses: + self.failed_ip_addresses.append(ip_address) + for ip_address in self.failed_ip_addresses: del device_data[ip_address] - if failed_ip_addresses: - self.job.logger.warning(f"Failed IP Addresses: {failed_ip_addresses}") self.device_data = device_data def execute_command_getter(self): @@ -279,6 +278,20 @@ def load_device_types(self): f"{ip_address}: Unable to load DeviceType due to missing key in returned data, {err}" ) + def _fields_missing_data(self, device_data, ip_address, platform): + """Verify that all of the fields returned from a device actually contain data.""" + fields_missing_data = [] + required_fields_from_device = ["device_type", "hostname", "mgmt_interface", "mask_length", "serial"] + + if platform: # platform is only retruned with device data if not provided on the job form/csv + required_fields_from_device.append("platform") + + for field in required_fields_from_device: + if not device_data[ip_address][field]: + fields_missing_data.append(field) + + return fields_missing_data + def load_devices(self): """Load devices into the DiffSync store.""" for ip_address in self.device_data: @@ -320,6 +333,15 @@ def load_devices(self): mask_length=int(self.device_data[ip_address]["mask_length"]), serial=self.device_data[ip_address]["serial"], ) # type: ignore + except KeyError as err: + self.job.logger.error(f"{ip_address}: Unable to load Device due to missing key in returned data, {err}") + + fields_missing_data = self._fields_missing_data( + device_data=self.device_data, ip_address=ip_address, platform=platform) + if fields_missing_data: + self.failed_ip_addresses.append(ip_address) + self.job.logger.error(f"Unable to onbaord {ip_address}, returned data missing for {fields_missing_data}") + else: try: self.add(onboarding_device) if self.job.debug: @@ -331,8 +353,6 @@ def load_devices(self): f"[Serial Number: 
{self.device_data[ip_address]['serial']}, " f"IP Address: {ip_address}]" ) - except KeyError as err: - self.job.logger.error(f"{ip_address}: Unable to load Device due to missing key in returned data, {err}") def load(self): """Load network data.""" @@ -342,3 +362,6 @@ def load(self): self.load_platforms() self.load_device_types() self.load_devices() + + if self.failed_ip_addresses: + self.job.logger.warning(f"Failed IP Addresses: {self.failed_ip_addresses}") \ No newline at end of file From 75cbd32b2d6e7f7c701f33ddc752ff207966ac5f Mon Sep 17 00:00:00 2001 From: Jeff Kala Date: Thu, 29 Feb 2024 15:35:53 -0600 Subject: [PATCH 141/225] enhance junos mgmt int and mask captures --- .../command_mappers/juniper_junos.yml | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/nautobot_device_onboarding/command_mappers/juniper_junos.yml b/nautobot_device_onboarding/command_mappers/juniper_junos.yml index 89d5ab80..2b71a7c0 100755 --- a/nautobot_device_onboarding/command_mappers/juniper_junos.yml +++ b/nautobot_device_onboarding/command_mappers/juniper_junos.yml @@ -20,12 +20,14 @@ device_onboarding: command: "show configuration interfaces | display json" use_textfsm: false # jpath: '"interface-information"[]."physical-interface"[]."logical-interface"[].{name: name[].data, ip: "address-family"[]."interface-address"[]."ifa-local"[][data=="{{ obj }}"][]}' - jpath: "*.interfaces.interface[].{name: name, ip: unit[].family[].*.address[][].name}" + # jpath: "*.interfaces.interface[].{name: name, ip: unit[].family[].*.address[][].name}" + jpath: '"interface-information"[]."physical-interface"[]."logical-interface"[].{name: name[].data, ip: "address-family"[]."interface-address"[]."ifa-local"[].data[]}' post_processor: "{% for entry in obj %}{% if entry['ip'] is not none %}{% for ipaddr in entry['ip'] %}{% if original_host in ipaddr %}{{ entry['name'] }}{% endif %}{% endfor %}{% endif %}{% endfor %}" mask_length: command: - command: "show configuration 
interfaces | display json" + command: "show route protocol direct | display json" use_textfsm: false - jpath: "*.interfaces.interface[].{name: name, ip: unit[].family[].*.address[][].name}" + # jpath: "*.interfaces.interface[].{name: name, ip: unit[].family[].*.address[][].name}" # post_processor: "{% for entry in obj %}{% if entry['ip'] and original_host in entry['ip'] %}{{ entry['ip'] | ipaddress_interface('netmask') | netmask_to_cidr }}{% endif %}{% endfor %}" - post_processor: "{% for entry in obj %}{% if entry['ip'] is not none %}{% for ipaddr in entry['ip'] %}{% if ipaddr.split('/')[0] == original_host %}{{ ipaddr.split('/')[1] }}{% endif %}{% endfor %}{% endif %}{% endfor %}" + jpath: '"route-information"[]."route-table"[]."rt"[]."rt-destination"[].data' + post_processor: "{% for ipaddr in obj %}{% if ipaddr.split('/')[0] == original_host %}{{ ipaddr.split('/')[1] }}{% endif %}{% endfor %}{% endif %}{% endfor %}" From 4fc097afdcb07aceb1fd2a2e77760eb409f76aeb Mon Sep 17 00:00:00 2001 From: Jeff Kala Date: Thu, 29 Feb 2024 15:41:53 -0600 Subject: [PATCH 142/225] enhance junos mgmt int and mask captures --- nautobot_device_onboarding/command_mappers/juniper_junos.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nautobot_device_onboarding/command_mappers/juniper_junos.yml b/nautobot_device_onboarding/command_mappers/juniper_junos.yml index 2b71a7c0..1a73b1a7 100755 --- a/nautobot_device_onboarding/command_mappers/juniper_junos.yml +++ b/nautobot_device_onboarding/command_mappers/juniper_junos.yml @@ -30,4 +30,4 @@ device_onboarding: # jpath: "*.interfaces.interface[].{name: name, ip: unit[].family[].*.address[][].name}" # post_processor: "{% for entry in obj %}{% if entry['ip'] and original_host in entry['ip'] %}{{ entry['ip'] | ipaddress_interface('netmask') | netmask_to_cidr }}{% endif %}{% endfor %}" jpath: '"route-information"[]."route-table"[]."rt"[]."rt-destination"[].data' - post_processor: "{% for ipaddr in obj %}{% if 
ipaddr.split('/')[0] == original_host %}{{ ipaddr.split('/')[1] }}{% endif %}{% endfor %}{% endif %}{% endfor %}" + post_processor: "{% for ipaddr in obj %}{% if ipaddr.split('/')[0] == original_host %}{{ ipaddr.split('/')[1] }}{% endif %}{% endfor %}" From 4c579159712c4097b7e7b69be440a905c55737fa Mon Sep 17 00:00:00 2001 From: Jeff Kala Date: Thu, 29 Feb 2024 16:45:59 -0600 Subject: [PATCH 143/225] fixes for most jnos --- .../command_mappers/juniper_junos.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/nautobot_device_onboarding/command_mappers/juniper_junos.yml b/nautobot_device_onboarding/command_mappers/juniper_junos.yml index 1a73b1a7..c603c4c7 100755 --- a/nautobot_device_onboarding/command_mappers/juniper_junos.yml +++ b/nautobot_device_onboarding/command_mappers/juniper_junos.yml @@ -17,12 +17,12 @@ device_onboarding: jpath: '"chassis-inventory"[]."chassis"[]."description"[].data' mgmt_interface: command: - command: "show configuration interfaces | display json" + command: "show interfaces | display json" use_textfsm: false # jpath: '"interface-information"[]."physical-interface"[]."logical-interface"[].{name: name[].data, ip: "address-family"[]."interface-address"[]."ifa-local"[][data=="{{ obj }}"][]}' # jpath: "*.interfaces.interface[].{name: name, ip: unit[].family[].*.address[][].name}" - jpath: '"interface-information"[]."physical-interface"[]."logical-interface"[].{name: name[].data, ip: "address-family"[]."interface-address"[]."ifa-local"[].data[]}' - post_processor: "{% for entry in obj %}{% if entry['ip'] is not none %}{% for ipaddr in entry['ip'] %}{% if original_host in ipaddr %}{{ entry['name'] }}{% endif %}{% endfor %}{% endif %}{% endfor %}" + jpath: '"interface-information"[]."physical-interface"[]."logical-interface"[].{name: name[].data, ip: "address-family"[]."interface-address"[]."ifa-local"[].data}' + post_processor: "{% for entry in obj %}{% if entry['ip'] is not none %}{% for ipaddr in entry['ip'] %}{% if 
original_host in ipaddr %}{{ entry['name'] | first }}{% endif %}{% endfor %}{% endif %}{% endfor %}" mask_length: command: command: "show route protocol direct | display json" From 5b71220fd5316530120c8c9096d0eefbea49175e Mon Sep 17 00:00:00 2001 From: Jeff Kala Date: Tue, 5 Mar 2024 08:00:32 -0600 Subject: [PATCH 144/225] rework yaml format and processor formatter --- .../command_mappers/arista_eos.yml | 40 +++--- .../command_mappers/cisco_ios.yml | 120 +++++++++++++----- .../command_mappers/cisco_nxos.yml | 41 +++--- .../command_mappers/juniper_junos.yml | 48 ++++--- .../nornir_plays/command_getter.py | 54 ++++++-- .../nornir_plays/processor.py | 57 +++++---- nautobot_device_onboarding/utils/formatter.py | 69 +++++++--- 7 files changed, 269 insertions(+), 160 deletions(-) diff --git a/nautobot_device_onboarding/command_mappers/arista_eos.yml b/nautobot_device_onboarding/command_mappers/arista_eos.yml index d4fd406b..490c7099 100755 --- a/nautobot_device_onboarding/command_mappers/arista_eos.yml +++ b/nautobot_device_onboarding/command_mappers/arista_eos.yml @@ -1,27 +1,27 @@ --- device_onboarding: hostname: - command: - command: "show hostname" - use_textfsm: true - jpath: "[*].hostname" + commands: + - command: "show hostname" + use_textfsm: true + jpath: "[*].hostname" serial: - command: - command: "show version" - use_textfsm: true - jpath: "[*].serial_number" + commands: + - command: "show version" + use_textfsm: true + jpath: "[*].serial_number" device_type: - command: - command: "show version" - use_textfsm: true - jpath: "[*].model" + commands: + - command: "show version" + use_textfsm: true + jpath: "[*].model" mgmt_interface: - command: - command: "show ip interface brief | json" - use_textfsm: false - jpath: "interfaces.*.{name: name, match_ip: interfaceAddress.ipAddr.address=='{{ obj }}'}[? 
match_ip].name" + commands: + - command: "show ip interface brief | json" + use_textfsm: false + jpath: "interfaces.*.{name: name, match_ip: interfaceAddress.ipAddr.address=='{{ obj }}'}[? match_ip].name" mask_length: - command: - command: "show ip interface brief | json" - use_textfsm: false - jpath: "interfaces.*.interfaceAddress[?ipAddr.address=='{{ obj }}'].ipAddr.maskLen" + commands: + - command: "show ip interface brief | json" + use_textfsm: false + jpath: "interfaces.*.interfaceAddress[?ipAddr.address=='{{ obj }}'].ipAddr.maskLen" diff --git a/nautobot_device_onboarding/command_mappers/cisco_ios.yml b/nautobot_device_onboarding/command_mappers/cisco_ios.yml index 8c351613..633386ed 100755 --- a/nautobot_device_onboarding/command_mappers/cisco_ios.yml +++ b/nautobot_device_onboarding/command_mappers/cisco_ios.yml @@ -1,44 +1,96 @@ --- device_onboarding: - # use_textfsm: true hostname: - jpath: "[*].hostname" - command: - command: "show version" - use_textfsm: true - post_processor: "{{ obj[0] | upper }}" + commands: + - command: "show version" + use_textfsm: true + post_processor: "{{ obj[0] | upper }}" + jpath: "[*].hostname" serial: - command: - command: "show version" - use_textfsm: true - jpath: "[*].serial[0]" + commands: + - command: "show version" + use_textfsm: true + jpath: "[*].serial[0]" device_type: - command: - command: "show version" - use_textfsm: true - jpath: "[*].hardware[0]" + commands: + - command: "show version" + use_textfsm: true + jpath: "[*].hardware[0]" mgmt_interface: - command: - command: "show interfaces" - use_textfsm: true - jpath: "[?ip_address=='{{ obj }}'].{name: interface, enabled: link_status}" - post_processor: "{% for interface in obj %}{% if 'up' in interface['enabled'] %}{{ interface.name }}{% endif %}{% endfor %}" + commands: + - command: "show interfaces" + use_textfsm: true + jpath: "[?ip_address=='{{ obj }}'].{name: interface, enabled: link_status}" + post_processor: "{% for interface in obj %}{% if 'up' in 
interface['enabled'] %}{{ interface.name }}{% endif %}{% endfor %}" mask_length: - command: - command: "show interfaces" - use_textfsm: true - jpath: "[?ip_address=='{{ obj }}'].prefix_length" - post_processor: "{{ obj | unique | first }}" + commands: + - command: "show interfaces" + use_textfsm: true + jpath: "[?ip_address=='{{ obj }}'].prefix_length" + post_processor: "{{ obj | unique | first }}" network_importer: - # use_textfsm: true serial: - command: - command: "show version" - use_textfsm: true - jpath: "[*].serial[0]" + commands: + - command: "show version" + use_textfsm: true + jpath: "[*].serial[0]" interfaces: - command: - command: "show interfaces" - use_textfsm: true - jpath: "[*].[$interface$,hardware_type,ip_address,prefix_length,mac_address,mtu,description,link_status,vlan_id,vlan_id_inner,vlan_id_outer]" - post_processor: "{{ obj | fix_interfaces | tojson }}" + interfaces: + commands: + - command: "show interfaces" + use_textfsm: true + jpath: "[*].{interface: interface}" + type: + commands: + - command: "show interfaces" + use_textfsm: true + jpath: "[*].{interface: interface, hardware_type: hardware_type}" + ip_addresses: + commands: + - command: "show interfaces" + use_textfsm: true + jpath: "[*].{interface: interface, ip_address: ip_address}" + mtu: + commands: + - command: "show interfaces" + use_textfsm: true + jpath: "[*].{interface: interface, mtu: mtu}" + mac_address: + commands: + - command: "show interfaces" + use_textfsm: true + jpath: "[*].{interface: interface, mac_address: mac_address}" + description: + commands: + - command: "show interfaces" + use_textfsm: true + jpath: "[*].{interface: interface, description: description}" + link_status: + commands: + - command: "show interfaces" + use_textfsm: true + jpath: "[*].{interface: interface, link_status: link_status}" + dot1q_mode: + commands: + - command: "show interfaces switchport" + use_textfsm: true + jpath: "[*].mode" + - command: "show interfaces" + use_textfsm: true + jpath: 
"[*].{interface: interface, encapsulation: encapsulation}" + validator_pattern: "not None" + lag: + commands: + - command: "show etherchannel summary" + use_textfsm: true + jpath: "[*].protocol" + untagged_vlan: + commands: + - command: "show vlans" + use_textfsm: true + jpath: "[*].vlan_id" + tagged_vlans: + commands: + - command: "show vlans" + use_textfsm: true + jpath: "[*].vlan_id" diff --git a/nautobot_device_onboarding/command_mappers/cisco_nxos.yml b/nautobot_device_onboarding/command_mappers/cisco_nxos.yml index 3ee55466..7ea67bc0 100755 --- a/nautobot_device_onboarding/command_mappers/cisco_nxos.yml +++ b/nautobot_device_onboarding/command_mappers/cisco_nxos.yml @@ -1,28 +1,27 @@ --- device_onboarding: - # use_textfsm: true hostname: - command: - command: "show version" - use_textfsm: true - jpath: "[*].hostname" + commands: + - command: "show version" + use_textfsm: true + jpath: "[*].hostname" serial: - command: - command: "show version" - use_textfsm: true - jpath: "[*].serial" + commands: + - command: "show version" + use_textfsm: true + jpath: "[*].serial" device_type: - command: - command: "show version" - use_textfsm: true - jpath: "[*].platform" + commands: + - command: "show version" + use_textfsm: true + jpath: "[*].platform" mgmt_interface: - command: - command: "show interface" - use_textfsm: true - jpath: "[?ip_address=='{{ obj }}'].interface || [`mgmt0`]" + commands: + - command: "show interface" + use_textfsm: true + jpath: "[?ip_address=='{{ obj }}'].interface || [`mgmt0`]" mask_length: - command: - command: "show interface" - use_textfsm: true - jpath: "[?ip_address=='{{ obj }}'].prefix_length || [`31`]" + commands: + - command: "show interface" + use_textfsm: true + jpath: "[?ip_address=='{{ obj }}'].prefix_length || [`31`]" diff --git a/nautobot_device_onboarding/command_mappers/juniper_junos.yml b/nautobot_device_onboarding/command_mappers/juniper_junos.yml index c603c4c7..963d54f9 100755 --- 
a/nautobot_device_onboarding/command_mappers/juniper_junos.yml +++ b/nautobot_device_onboarding/command_mappers/juniper_junos.yml @@ -1,33 +1,29 @@ --- device_onboarding: hostname: - command: - command: "show version | display json" - use_textfsm: false - jpath: '"software-information"[]."host-name"[].data' + commands: + - command: "show version | display json" + use_textfsm: false + jpath: '"software-information"[]."host-name"[].data' serial: - command: - command: "show chassis hardware | display json" - use_textfsm: false - jpath: '"chassis-inventory"[]."chassis"[]."serial-number"[].data' + commands: + - command: "show chassis hardware | display json" + use_textfsm: false + jpath: '"chassis-inventory"[]."chassis"[]."serial-number"[].data' device_type: - command: - command: "show chassis hardware | display json" - use_textfsm: false - jpath: '"chassis-inventory"[]."chassis"[]."description"[].data' + commands: + - command: "show chassis hardware | display json" + use_textfsm: false + jpath: '"chassis-inventory"[]."chassis"[]."description"[].data' mgmt_interface: - command: - command: "show interfaces | display json" - use_textfsm: false - # jpath: '"interface-information"[]."physical-interface"[]."logical-interface"[].{name: name[].data, ip: "address-family"[]."interface-address"[]."ifa-local"[][data=="{{ obj }}"][]}' - # jpath: "*.interfaces.interface[].{name: name, ip: unit[].family[].*.address[][].name}" - jpath: '"interface-information"[]."physical-interface"[]."logical-interface"[].{name: name[].data, ip: "address-family"[]."interface-address"[]."ifa-local"[].data}' - post_processor: "{% for entry in obj %}{% if entry['ip'] is not none %}{% for ipaddr in entry['ip'] %}{% if original_host in ipaddr %}{{ entry['name'] | first }}{% endif %}{% endfor %}{% endif %}{% endfor %}" + commands: + - command: "show interfaces | display json" + use_textfsm: false + jpath: '"interface-information"[]."physical-interface"[]."logical-interface"[].{name: name[].data, ip: 
"address-family"[]."interface-address"[]."ifa-local"[].data}' + post_processor: "{% for entry in obj %}{% if entry['ip'] is not none %}{% for ipaddr in entry['ip'] %}{% if original_host in ipaddr %}{{ entry['name'] | first }}{% endif %}{% endfor %}{% endif %}{% endfor %}" mask_length: - command: - command: "show route protocol direct | display json" - use_textfsm: false - # jpath: "*.interfaces.interface[].{name: name, ip: unit[].family[].*.address[][].name}" - # post_processor: "{% for entry in obj %}{% if entry['ip'] and original_host in entry['ip'] %}{{ entry['ip'] | ipaddress_interface('netmask') | netmask_to_cidr }}{% endif %}{% endfor %}" - jpath: '"route-information"[]."route-table"[]."rt"[]."rt-destination"[].data' - post_processor: "{% for ipaddr in obj %}{% if ipaddr.split('/')[0] == original_host %}{{ ipaddr.split('/')[1] }}{% endif %}{% endfor %}" + commands: + - command: "show route protocol direct | display json" + use_textfsm: false + jpath: '"route-information"[]."route-table"[]."rt"[]."rt-destination"[].data' + post_processor: "{% for ipaddr in obj %}{% if ipaddr.split('/')[0] == original_host %}{{ ipaddr.split('/')[1] }}{% endif %}{% endfor %}" diff --git a/nautobot_device_onboarding/nornir_plays/command_getter.py b/nautobot_device_onboarding/nornir_plays/command_getter.py index fca0f3a9..1b1c44dc 100755 --- a/nautobot_device_onboarding/nornir_plays/command_getter.py +++ b/nautobot_device_onboarding/nornir_plays/command_getter.py @@ -8,6 +8,7 @@ from nornir import InitNornir from nornir.core.plugins.inventory import InventoryPluginRegister, TransformFunctionRegister from nornir.core.task import Result, Task +from nornir.core.exceptions import NornirSubTaskError from nornir_netmiko.tasks import netmiko_send_command from nautobot_device_onboarding.constants import NETMIKO_TO_NAPALM_STATIC @@ -22,14 +23,42 @@ TransformFunctionRegister.register("transform_to_add_command_parser_info", add_platform_parsing_info) +def deduplicate_command_list(data): + 
"""Deduplicates a list of dictionaries based on 'command' and 'use_textfsm' keys. + + Args: + data: A list of dictionaries. + + Returns: + A new list containing only unique elements based on 'command' and 'use_textfsm'. + """ + seen = set() + unique_list = [] + for item in data: + # Create a tuple containing only 'command' and 'use_textfsm' for comparison + key = (item['command'], item['use_textfsm']) + if key not in seen: + seen.add(key) + unique_list.append(item) + return unique_list + + def _get_commands_to_run(yaml_parsed_info, command_getter_job): """Load yaml file and look up all commands that need to be run.""" - commands = [] + all_commands = [] for _, value in yaml_parsed_info[command_getter_job].items(): # Deduplicate commands + parser key - if value["command"] not in commands: - commands.append(value["command"]) - return commands + if value.get("commands"): + # Means their isn't any "nested" structures. + for command in value["commands"]: + all_commands.append(command) + else: + # Means their is a "nested" structures. 
+ for _, nested_command_info in value.items(): + if isinstance(nested_command_info, dict): + for command in nested_command_info["commands"]: + all_commands.append(command) + return deduplicate_command_list(all_commands) def netmiko_send_commands(task: Task, command_getter_job: str): @@ -40,13 +69,16 @@ def netmiko_send_commands(task: Task, command_getter_job: str): return Result(host=task.host, result=f"{task.host.name} has a unsupported platform set.", failed=True) commands = _get_commands_to_run(task.host.data["platform_parsing_info"], command_getter_job) for command in commands: - task.run( - task=netmiko_send_command, - name=command["command"], - command_string=command["command"], - use_textfsm=command["use_textfsm"], - read_timeout=60, - ) + try: + task.run( + task=netmiko_send_command, + name=command["command"], + command_string=command["command"], + use_textfsm=command["use_textfsm"], + read_timeout=60, + ) + except NornirSubTaskError: + Result(host=task.host, changed=False, result=f"{command['command']}: E0001 - Textfsm template issue.", failed=True) def command_getter_do(job_result, log_level, kwargs): diff --git a/nautobot_device_onboarding/nornir_plays/processor.py b/nautobot_device_onboarding/nornir_plays/processor.py index 71e304ff..672aa3ae 100755 --- a/nautobot_device_onboarding/nornir_plays/processor.py +++ b/nautobot_device_onboarding/nornir_plays/processor.py @@ -4,7 +4,6 @@ from nornir.core.inventory import Host from nornir.core.task import MultiResult, Task -from nornir_nautobot.exceptions import NornirNautobotException from nornir_nautobot.plugins.processors import BaseLoggingProcessor from nautobot_device_onboarding.utils.formatter import extract_show_data @@ -37,27 +36,29 @@ def task_instance_completed(self, task: Task, host: Host, result: MultiResult) - """ # Complex logic to see if the task exception is expected, which is depicted by # a sub task raising a NornirNautobotException. 
- if result.failed: - for level_1_result in result: - if hasattr(level_1_result, "exception") and hasattr(level_1_result.exception, "result"): - for level_2_result in level_1_result.exception.result: # type: ignore - if isinstance(level_2_result.exception, NornirNautobotException): - return - self.logger.critical(f"{task.name} failed: {result.exception}", extra={"object": task.host}) - else: - self.logger.info( - f"task_instance_completed Task Name: {task.name} Task Result: {result.result}", - extra={"object": task.host}, - ) - if result.name == "netmiko_send_commands": - self.data[host.name].update( - { - "failed": result.failed, - } - ) - if result.failed: - self.logger.warning(f"Task Failed! Result {result.result}.", extra={"object": task.host}) - self.data[host.name]["failed_reason"] = result.result + # if result.failed: + # for level_1_result in result: + # if hasattr(level_1_result, "exception") and hasattr(level_1_result.exception, "result"): + # print("inside level2 hasatter") + # for level_2_result in level_1_result.exception.result: # type: ignore + # print("inside the level2") + # if isinstance(level_2_result.exception, NornirNautobotException): + # return + # self.logger.critical(f"{task.name} failed: {result.exception}", extra={"object": task.host}) + # else: + self.logger.info( + f"task_instance_completed Task Name: {task.name} Task Result: {result.result}", + extra={"object": task.host}, + ) + # if result.name == "netmiko_send_commands": + # self.data[host.name].update( + # { + # "failed": result.failed, + # } + # ) + # if result.failed: + # self.logger.warning(f"Task Failed! 
Result {result.result}.", extra={"object": task.host}) + # self.data[host.name]["failed_reason"] = result.result def subtask_instance_completed(self, task: Task, host: Host, result: MultiResult) -> None: """Processor for logging and data processing on subtask completed.""" @@ -65,11 +66,13 @@ def subtask_instance_completed(self, task: Task, host: Host, result: MultiResult if self.kwargs["debug"]: self.logger.debug(f"subtask_instance_completed Subtask result {result.result}.", extra={"object": task.host}) - self.data[host.name].update( - { - "failed": result.failed, - } - ) + # self.data[host.name].update( + # { + # "failed": result.failed, + # + # } + # ) + # if not result.failed: formatted_data = extract_show_data(host, result, task.parent_task.params["command_getter_job"]) # revist should be able to just update self.data with full formatted_data for k, v in formatted_data.items(): diff --git a/nautobot_device_onboarding/utils/formatter.py b/nautobot_device_onboarding/utils/formatter.py index 156e5c60..ac9ff63b 100755 --- a/nautobot_device_onboarding/utils/formatter.py +++ b/nautobot_device_onboarding/utils/formatter.py @@ -46,6 +46,43 @@ def load_yaml_datafile(filename): return data +def perform_data_extraction(host, dict_field, command_info_dict, j2_env, task_result): + """Extract, process data.""" + result_dict = {} + for show_command in command_info_dict["commands"]: + if show_command["command"] == task_result.name: + jpath_template = j2_env.from_string(show_command["jpath"]) + j2_rendered_jpath = jpath_template.render({"obj": host.name}) + if not task_result.failed: + if isinstance(task_result.result, str): + try: + result_to_json = json.loads(task_result.result) + extracted_value = extract_data_from_json(result_to_json, j2_rendered_jpath) + except json.decoder.JSONDecodeError: + extracted_value = None + else: + extracted_value = extract_data_from_json(task_result.result, j2_rendered_jpath) + if show_command.get("post_processor"): + template = 
j2_env.from_string(show_command["post_processor"]) + extracted_processed = template.render({"obj": extracted_value, "original_host": host.name}) + else: + extracted_processed = extracted_value + if isinstance(extracted_value, list) and len(extracted_value) == 1: + extracted_processed = extracted_value[0] + if command_info_dict.get("validator_pattern"): + # temp validator + if command_info_dict["validator_pattern"] == "not None": + if not extracted_processed: + print("validator pattern not detected, checking next command.") + continue + else: + print("About to break the sequence due to valid pattern found") + result_dict[dict_field] = extracted_processed + break + result_dict[dict_field] = extracted_processed + return result_dict + + def extract_show_data(host, multi_result, command_getter_type): """Take a result of show command and extra specific needed data. @@ -60,25 +97,15 @@ def extract_show_data(host, multi_result, command_getter_type): if host_platform == "cisco_xe": host_platform = "cisco_ios" command_jpaths = host.data["platform_parsing_info"] - - result_dict = {} + final_result_dict = {} for default_dict_field, command_info in command_jpaths[command_getter_type].items(): - if command_info["command"]["command"] == multi_result[0].name: - jpath_template = jinja_env.from_string(command_info["jpath"]) - j2_rendered_jpath = jpath_template.render({"obj": host.name}) - print(j2_rendered_jpath) - print(type(multi_result[0].result)) - if isinstance(multi_result[0].result, str): - extracted_value = extract_data_from_json(json.loads(multi_result[0].result), j2_rendered_jpath) - else: - extracted_value = extract_data_from_json(multi_result[0].result, j2_rendered_jpath) - print(extracted_value) - if command_info.get("post_processor"): - template = jinja_env.from_string(command_info["post_processor"]) - extracted_processed = template.render({"obj": extracted_value, "original_host": host.name}) - else: - extracted_processed = extracted_value - if 
isinstance(extracted_value, list) and len(extracted_value) == 1: - extracted_processed = extracted_value[0] - result_dict[default_dict_field] = extracted_processed - return result_dict + if command_info.get("commands"): + # Means their isn't any "nested" structures. Therefore not expected to see "validator_pattern key" + result = perform_data_extraction(host, default_dict_field, command_info, jinja_env, multi_result[0]) + final_result_dict.update(result) + else: + # Means their is a "nested" structures. Priority + for dict_field, nested_command_info in command_info.items(): + result = perform_data_extraction(host, dict_field, nested_command_info, jinja_env, multi_result[0]) + final_result_dict.update(result) + return final_result_dict From ca6bcab6b879f0272d698ec89c942d561e4c828c Mon Sep 17 00:00:00 2001 From: Susan Hooks Date: Thu, 7 Mar 2024 18:13:42 +0000 Subject: [PATCH 145/225] fixed nxos serial --- nautobot_device_onboarding/command_mappers/cisco_nxos.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nautobot_device_onboarding/command_mappers/cisco_nxos.yml b/nautobot_device_onboarding/command_mappers/cisco_nxos.yml index 7ea67bc0..5bae0cfc 100755 --- a/nautobot_device_onboarding/command_mappers/cisco_nxos.yml +++ b/nautobot_device_onboarding/command_mappers/cisco_nxos.yml @@ -7,9 +7,9 @@ device_onboarding: jpath: "[*].hostname" serial: commands: - - command: "show version" + - command: "show inventory" use_textfsm: true - jpath: "[*].serial" + jpath: "[?name=='Chassis'].sn" device_type: commands: - command: "show version" From 7ea29c8c5e97d50702e6ff92868c704c4b48b514 Mon Sep 17 00:00:00 2001 From: David Cates Date: Thu, 7 Mar 2024 17:14:40 -0700 Subject: [PATCH 146/225] improve logging and calls to secrets providers --- .../diffsync/adapters/onboarding_adapters.py | 22 +++--- .../diffsync/models/onboarding_models.py | 18 +++-- nautobot_device_onboarding/jobs.py | 13 ++-- .../nornir_plays/command_getter.py | 76 +++++++++++++++---- 
.../nornir_plays/logger.py | 2 +- .../nornir_plays/processor.py | 22 +++--- .../utils/diffsync_utils.py | 2 - nautobot_device_onboarding/utils/formatter.py | 7 +- .../utils/inventory_creator.py | 34 +-------- 9 files changed, 109 insertions(+), 87 deletions(-) diff --git a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py index acf8a5a0..bc4a7b29 100644 --- a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py @@ -176,14 +176,9 @@ def _handle_failed_devices(self, device_data): If a device fails to return expected data, log the result and remove it from the data to be loaded into the diffsync store. """ - failed_ip_addresses = [] - for ip_address in device_data: - if device_data[ip_address].get("failed"): - self.job.logger.error( - f"{ip_address}: Connection or data error, this device will not be synced. " - f"{device_data[ip_address].get('failed_reason')}" - ) + if not device_data[ip_address]: + self.job.logger.error(f"{ip_address}: Connection or data error, this device will not be synced.") self.failed_ip_addresses.append(ip_address) for ip_address in self.failed_ip_addresses: del device_data[ip_address] @@ -283,7 +278,7 @@ def _fields_missing_data(self, device_data, ip_address, platform): fields_missing_data = [] required_fields_from_device = ["device_type", "hostname", "mgmt_interface", "mask_length", "serial"] - if platform: # platform is only retruned with device data if not provided on the job form/csv + if platform: # platform is only retruned with device data if not provided on the job form/csv required_fields_from_device.append("platform") for field in required_fields_from_device: @@ -335,12 +330,17 @@ def load_devices(self): ) # type: ignore except KeyError as err: self.job.logger.error(f"{ip_address}: Unable to load Device due to missing key in returned data, {err}") + except 
ValueError as err: + self.job.logger.error(f"{ip_address}: Unable to load Device due to invalid data type in data return, {err}") fields_missing_data = self._fields_missing_data( - device_data=self.device_data, ip_address=ip_address, platform=platform) + device_data=self.device_data, ip_address=ip_address, platform=platform + ) if fields_missing_data: self.failed_ip_addresses.append(ip_address) - self.job.logger.error(f"Unable to onbaord {ip_address}, returned data missing for {fields_missing_data}") + self.job.logger.error( + f"Unable to onbaord {ip_address}, returned data missing for {fields_missing_data}" + ) else: try: self.add(onboarding_device) @@ -364,4 +364,4 @@ def load(self): self.load_devices() if self.failed_ip_addresses: - self.job.logger.warning(f"Failed IP Addresses: {self.failed_ip_addresses}") \ No newline at end of file + self.job.logger.warning(f"Failed IP Addresses: {self.failed_ip_addresses}") diff --git a/nautobot_device_onboarding/diffsync/models/onboarding_models.py b/nautobot_device_onboarding/diffsync/models/onboarding_models.py index 9c80a237..ea6fa121 100644 --- a/nautobot_device_onboarding/diffsync/models/onboarding_models.py +++ b/nautobot_device_onboarding/diffsync/models/onboarding_models.py @@ -134,18 +134,26 @@ def _get_or_create_interface(cls, diffsync, device, attrs): def _update_device_with_attrs(cls, device, platform, ids, attrs, diffsync): """Update a Nautobot device instance.""" device.location = diffsync_utils.retrieve_submitted_value( - job=diffsync.job, ip_address=attrs["primary_ip4__host"], query_string="location", + job=diffsync.job, + ip_address=attrs["primary_ip4__host"], + query_string="location", ) device.status = diffsync_utils.retrieve_submitted_value( - job=diffsync.job, ip_address=attrs["primary_ip4__host"], query_string="device_status", + job=diffsync.job, + ip_address=attrs["primary_ip4__host"], + query_string="device_status", ) device.role = diffsync_utils.retrieve_submitted_value( - job=diffsync.job, 
ip_address=attrs["primary_ip4__host"], query_string="device_role", + job=diffsync.job, + ip_address=attrs["primary_ip4__host"], + query_string="device_role", ) device.device_type = DeviceType.objects.get(model=attrs["device_type__model"]) device.platform = platform device.secrets_group = diffsync_utils.retrieve_submitted_value( - job=diffsync.job, ip_address=attrs["primary_ip4__host"], query_string="secrets_group", + job=diffsync.job, + ip_address=attrs["primary_ip4__host"], + query_string="secrets_group", ) device.serial = ids["serial"] @@ -155,7 +163,7 @@ def _update_device_with_attrs(cls, device, platform, ids, attrs, diffsync): def create(cls, diffsync, ids, attrs): """Create a new nautobot device using data scraped from a device.""" if diffsync.job.debug: - diffsync.job.logger.debug("Creating device {ids} with {attrs}") + diffsync.job.logger.debug(f"Creating device {ids} with {attrs}") # Get or create Device, Interface and IP Address device = cls._get_or_create_device(diffsync, ids, attrs) diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py index 84c4feed..0c27154e 100755 --- a/nautobot_device_onboarding/jobs.py +++ b/nautobot_device_onboarding/jobs.py @@ -296,13 +296,12 @@ def _convert_sring_to_bool(self, string, header): """Given a string of 'true' or 'false' convert to bool.""" if string.lower() == "true": return True - elif string.lower() == "false": + if string.lower() == "false": return False - else: - raise ValidationError( - f"'{string}' in column '{header}' failed to convert to a boolean value. " - "Please use either 'True' or 'False'." - ) + raise ValidationError( + f"'{string}' in column '{header}' failed to convert to a boolean value. " + "Please use either 'True' or 'False'." 
+ ) def _process_csv_data(self, csv_file): """ "Convert CSV data into a dictionary containing Nautobot objects.""" @@ -314,6 +313,7 @@ def _process_csv_data(self, csv_file): processed_csv_data = {} row_count = 1 for row in csv_reader: + query = None try: query = f"location_name: {row.get('location_name')}, location_parent_name: {row.get('location_parent_name')}" if row.get("location_parent_name"): @@ -393,7 +393,6 @@ def _process_csv_data(self, csv_file): row_count += 1 except ValidationError as err: self.logger.error(f"(row {sum([row_count, 1])}), {err}") - self.logger.error row_count += 1 if processing_failed: processed_csv_data = None diff --git a/nautobot_device_onboarding/nornir_plays/command_getter.py b/nautobot_device_onboarding/nornir_plays/command_getter.py index 1b1c44dc..d21feab8 100755 --- a/nautobot_device_onboarding/nornir_plays/command_getter.py +++ b/nautobot_device_onboarding/nornir_plays/command_getter.py @@ -1,14 +1,16 @@ """CommandGetter.""" # pylint: disable=relative-beyond-top-level +from django.conf import settings from nautobot.dcim.models import Platform +from nautobot.extras.choices import SecretsGroupAccessTypeChoices, SecretsGroupSecretTypeChoices from nautobot.extras.models import SecretsGroup from nautobot_plugin_nornir.constants import NORNIR_SETTINGS from nautobot_plugin_nornir.plugins.inventory.nautobot_orm import NautobotORMInventory from nornir import InitNornir +from nornir.core.exceptions import NornirSubTaskError from nornir.core.plugins.inventory import InventoryPluginRegister, TransformFunctionRegister from nornir.core.task import Result, Task -from nornir.core.exceptions import NornirSubTaskError from nornir_netmiko.tasks import netmiko_send_command from nautobot_device_onboarding.constants import NETMIKO_TO_NAPALM_STATIC @@ -36,7 +38,7 @@ def deduplicate_command_list(data): unique_list = [] for item in data: # Create a tuple containing only 'command' and 'use_textfsm' for comparison - key = (item['command'], 
item['use_textfsm']) + key = (item["command"], item["use_textfsm"]) if key not in seen: seen.add(key) unique_list.append(item) @@ -78,7 +80,40 @@ def netmiko_send_commands(task: Task, command_getter_job: str): read_timeout=60, ) except NornirSubTaskError: - Result(host=task.host, changed=False, result=f"{command['command']}: E0001 - Textfsm template issue.", failed=True) + Result( + host=task.host, + changed=False, + result=f"{command['command']}: E0001 - Textfsm template issue.", + failed=True, + ) + + +def _parse_credentials(credentials): + """Parse and return dictionary of credentials.""" + if credentials: + try: + username = credentials.get_secret_value( + access_type=SecretsGroupAccessTypeChoices.TYPE_GENERIC, + secret_type=SecretsGroupSecretTypeChoices.TYPE_USERNAME, + ) + password = credentials.get_secret_value( + access_type=SecretsGroupAccessTypeChoices.TYPE_GENERIC, + secret_type=SecretsGroupSecretTypeChoices.TYPE_PASSWORD, + ) + try: + secret = credentials.get_secret_value( + access_type=SecretsGroupAccessTypeChoices.TYPE_GENERIC, + secret_type=SecretsGroupSecretTypeChoices.TYPE_SECRET, + ) + except Exception: + secret = None + except Exception: + return (None, None, None) + else: + username = settings.NAPALM_USERNAME + password = settings.NAPALM_PASSWORD + secret = settings.NAPALM_ARGS.get("secret", None) + return (username, password, secret) def command_getter_do(job_result, log_level, kwargs): @@ -93,8 +128,8 @@ def command_getter_do(job_result, log_level, kwargs): ip_addresses = kwargs["ip_addresses"].replace(" ", "").split(",") port = kwargs["port"] # timeout = kwargs["timeout"] - secrets_group = kwargs["secrets_group"] platform = kwargs["platform"] + username, password, secret = _parse_credentials(kwargs["secrets_group"]) # Initiate Nornir instance with empty inventory try: @@ -108,6 +143,7 @@ def command_getter_do(job_result, log_level, kwargs): }, ) as nornir_obj: nr_with_processors = nornir_obj.with_processors([ProcessorDO(logger, 
compiled_results, kwargs)]) + loaded_secrets_group = None for entered_ip in ip_addresses: if kwargs["csv_file"]: # get platform if one was provided via csv @@ -116,24 +152,34 @@ def command_getter_do(job_result, log_level, kwargs): if platform_id: platform = Platform.objects.get(id=platform_id) - # get secrets group if one was provided via csv - secrets_group = None + # parse secrets from secrets groups provided via csv secrets_group_id = kwargs["csv_file"][entered_ip]["secrets_group"] if secrets_group_id: - secrets_group = SecretsGroup.objects.get(id=secrets_group_id) + new_secrets_group = SecretsGroup.objects.get(id=secrets_group_id) + # only update the credentials if the secrets_group specified on a csv row + # is different than the secrets group on the previous csv row. This prevents + # unnecessary repeat calls to secrets providers. + if new_secrets_group != loaded_secrets_group: + logger.info(f"Parsing credentials from Secrets Group: {new_secrets_group.name}") + loaded_secrets_group = new_secrets_group + username, password, secret = _parse_credentials(loaded_secrets_group) + if not (username and password): + logger.error(f"Unable to onboard {entered_ip}, failed to parse credentials") + single_host_inventory_constructed = _set_inventory( + host_ip=entered_ip, + platform=platform, + port=kwargs["csv_file"][entered_ip]["port"], + username=username, + password=password, + ) + else: single_host_inventory_constructed = _set_inventory( - host_ip=entered_ip, - platform=platform, - port=kwargs["csv_file"][entered_ip]["port"], - secrets_group=secrets_group, + entered_ip, platform, port, username, password ) - else: - single_host_inventory_constructed = _set_inventory(entered_ip, platform, port, secrets_group) nr_with_processors.inventory.hosts.update(single_host_inventory_constructed) nr_with_processors.run(task=netmiko_send_commands, command_getter_job="device_onboarding") except Exception as err: # pylint: disable=broad-exception-caught - logger.error("Error: %s", 
err) - return err + logger.error(err) return compiled_results diff --git a/nautobot_device_onboarding/nornir_plays/logger.py b/nautobot_device_onboarding/nornir_plays/logger.py index a7975c3c..ffa4b382 100755 --- a/nautobot_device_onboarding/nornir_plays/logger.py +++ b/nautobot_device_onboarding/nornir_plays/logger.py @@ -24,7 +24,7 @@ def _logging_helper(self, attr: str, message: str, extra: Any = None): if not extra: extra = {} getattr(LOGGER_ADAPTER, attr)(message, extra=extra) - self.job_result.log(message, level_choice=attr, obj=extra.get("object"), grouping=extra.get("grouping", "")) + self.job_result.log(message, level_choice=attr) def debug(self, message: str, extra: Any = None): """Match standard Python Library debug signature.""" diff --git a/nautobot_device_onboarding/nornir_plays/processor.py b/nautobot_device_onboarding/nornir_plays/processor.py index 672aa3ae..8392b79b 100755 --- a/nautobot_device_onboarding/nornir_plays/processor.py +++ b/nautobot_device_onboarding/nornir_plays/processor.py @@ -37,14 +37,14 @@ def task_instance_completed(self, task: Task, host: Host, result: MultiResult) - # Complex logic to see if the task exception is expected, which is depicted by # a sub task raising a NornirNautobotException. 
# if result.failed: - # for level_1_result in result: - # if hasattr(level_1_result, "exception") and hasattr(level_1_result.exception, "result"): - # print("inside level2 hasatter") - # for level_2_result in level_1_result.exception.result: # type: ignore - # print("inside the level2") - # if isinstance(level_2_result.exception, NornirNautobotException): - # return - # self.logger.critical(f"{task.name} failed: {result.exception}", extra={"object": task.host}) + # for level_1_result in result: + # if hasattr(level_1_result, "exception") and hasattr(level_1_result.exception, "result"): + # print("inside level2 hasatter") + # for level_2_result in level_1_result.exception.result: # type: ignore + # print("inside the level2") + # if isinstance(level_2_result.exception, NornirNautobotException): + # return + # self.logger.critical(f"{task.name} failed: {result.exception}", extra={"object": task.host}) # else: self.logger.info( f"task_instance_completed Task Name: {task.name} Task Result: {result.result}", @@ -64,12 +64,14 @@ def subtask_instance_completed(self, task: Task, host: Host, result: MultiResult """Processor for logging and data processing on subtask completed.""" self.logger.info(f"subtask_instance_completed Subtask completed {task.name}.", extra={"object": task.host}) if self.kwargs["debug"]: - self.logger.debug(f"subtask_instance_completed Subtask result {result.result}.", extra={"object": task.host}) + self.logger.debug( + f"subtask_instance_completed Subtask result {result.result}.", extra={"object": task.host} + ) # self.data[host.name].update( # { # "failed": result.failed, - # + # # } # ) # if not result.failed: diff --git a/nautobot_device_onboarding/utils/diffsync_utils.py b/nautobot_device_onboarding/utils/diffsync_utils.py index 947a9505..59368ae8 100644 --- a/nautobot_device_onboarding/utils/diffsync_utils.py +++ b/nautobot_device_onboarding/utils/diffsync_utils.py @@ -99,8 +99,6 @@ def retrieve_submitted_value(job, ip_address, query_string): 
return the value input into the job form. """ if job.processed_csv_data: - if job.debug: - job.logger.debug(f"Retrieved csv value: {query_string} for {ip_address}") return job.processed_csv_data[ip_address][query_string] else: return getattr(job, query_string) diff --git a/nautobot_device_onboarding/utils/formatter.py b/nautobot_device_onboarding/utils/formatter.py index ac9ff63b..c27167e0 100755 --- a/nautobot_device_onboarding/utils/formatter.py +++ b/nautobot_device_onboarding/utils/formatter.py @@ -1,13 +1,14 @@ """Formatter.""" -import os import json +import os + import yaml from django.template import engines from django.utils.module_loading import import_string from jdiff import extract_data_from_json - from jinja2.sandbox import SandboxedEnvironment + from nautobot_device_onboarding.utils.jinja_filters import fix_interfaces DATA_DIR = os.path.abspath(os.path.join(os.path.dirname(os.path.dirname(__file__)), "command_mappers")) @@ -104,7 +105,7 @@ def extract_show_data(host, multi_result, command_getter_type): result = perform_data_extraction(host, default_dict_field, command_info, jinja_env, multi_result[0]) final_result_dict.update(result) else: - # Means their is a "nested" structures. Priority + # Means their is a "nested" structures. 
Priority for dict_field, nested_command_info in command_info.items(): result = perform_data_extraction(host, dict_field, nested_command_info, jinja_env, multi_result[0]) final_result_dict.update(result) diff --git a/nautobot_device_onboarding/utils/inventory_creator.py b/nautobot_device_onboarding/utils/inventory_creator.py index 9b6e9ea8..a22f6bbc 100755 --- a/nautobot_device_onboarding/utils/inventory_creator.py +++ b/nautobot_device_onboarding/utils/inventory_creator.py @@ -1,42 +1,11 @@ """Inventory Creator and Helpers.""" -from django.conf import settings -from nautobot.extras.choices import SecretsGroupAccessTypeChoices, SecretsGroupSecretTypeChoices from netmiko import SSHDetect from nornir.core.inventory import ConnectionOptions, Host -from nautobot_device_onboarding.exceptions import OnboardException from nautobot_device_onboarding.utils.helper import _get_platform_parsing_info -def _parse_credentials(credentials): - """Parse and return dictionary of credentials.""" - if credentials: - try: - username = credentials.get_secret_value( - access_type=SecretsGroupAccessTypeChoices.TYPE_GENERIC, - secret_type=SecretsGroupSecretTypeChoices.TYPE_USERNAME, - ) - password = credentials.get_secret_value( - access_type=SecretsGroupAccessTypeChoices.TYPE_GENERIC, - secret_type=SecretsGroupSecretTypeChoices.TYPE_PASSWORD, - ) - try: - secret = credentials.get_secret_value( - access_type=SecretsGroupAccessTypeChoices.TYPE_GENERIC, - secret_type=SecretsGroupSecretTypeChoices.TYPE_SECRET, - ) - except Exception: - secret = None - except Exception as err: - raise OnboardException("fail-credentials - Unable to parse selected credentials.") from err - else: - username = settings.NAPALM_USERNAME - password = settings.NAPALM_PASSWORD - secret = settings.NAPALM_ARGS.get("secret", None) - return (username, password, secret) - - def guess_netmiko_device_type(hostname, username, password, port): """Guess the device type of host, based on Netmiko.""" guessed_device_type = None @@ 
-61,10 +30,9 @@ def guess_netmiko_device_type(hostname, username, password, port): return guessed_device_type -def _set_inventory(host_ip, platform, port, secrets_group): +def _set_inventory(host_ip, platform, port, username, password): """Construct Nornir Inventory.""" inv = {} - username, password, secret = _parse_credentials(secrets_group) if platform: platform = platform.network_driver else: From 2bfe0a2bb1045c6e9b2077825f110a180ee0b5e2 Mon Sep 17 00:00:00 2001 From: David Cates Date: Thu, 7 Mar 2024 17:23:47 -0700 Subject: [PATCH 147/225] update onboarding adapter --- .../diffsync/adapters/onboarding_adapters.py | 10 +++++----- .../nornir_plays/command_getter.py | 4 +--- 2 files changed, 6 insertions(+), 8 deletions(-) diff --git a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py index bc4a7b29..8eed48b2 100644 --- a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py @@ -277,14 +277,12 @@ def _fields_missing_data(self, device_data, ip_address, platform): """Verify that all of the fields returned from a device actually contain data.""" fields_missing_data = [] required_fields_from_device = ["device_type", "hostname", "mgmt_interface", "mask_length", "serial"] - if platform: # platform is only retruned with device data if not provided on the job form/csv required_fields_from_device.append("platform") - for field in required_fields_from_device: - if not device_data[ip_address][field]: + data = device_data[ip_address] + if not data.get(field): fields_missing_data.append(field) - return fields_missing_data def load_devices(self): @@ -331,7 +329,9 @@ def load_devices(self): except KeyError as err: self.job.logger.error(f"{ip_address}: Unable to load Device due to missing key in returned data, {err}") except ValueError as err: - self.job.logger.error(f"{ip_address}: Unable to load Device 
due to invalid data type in data return, {err}") + self.job.logger.error( + f"{ip_address}: Unable to load Device due to invalid data type in data return, {err}" + ) fields_missing_data = self._fields_missing_data( device_data=self.device_data, ip_address=ip_address, platform=platform diff --git a/nautobot_device_onboarding/nornir_plays/command_getter.py b/nautobot_device_onboarding/nornir_plays/command_getter.py index d21feab8..0c82694c 100755 --- a/nautobot_device_onboarding/nornir_plays/command_getter.py +++ b/nautobot_device_onboarding/nornir_plays/command_getter.py @@ -173,9 +173,7 @@ def command_getter_do(job_result, log_level, kwargs): password=password, ) else: - single_host_inventory_constructed = _set_inventory( - entered_ip, platform, port, username, password - ) + single_host_inventory_constructed = _set_inventory(entered_ip, platform, port, username, password) nr_with_processors.inventory.hosts.update(single_host_inventory_constructed) nr_with_processors.run(task=netmiko_send_commands, command_getter_job="device_onboarding") except Exception as err: # pylint: disable=broad-exception-caught From c465c548c133f8153474468e81b333a494d000fc Mon Sep 17 00:00:00 2001 From: David Cates Date: Thu, 7 Mar 2024 22:25:50 -0700 Subject: [PATCH 148/225] set defaut platform value to None when loading devices in onboarding adapter --- .../diffsync/adapters/onboarding_adapters.py | 1 + 1 file changed, 1 insertion(+) diff --git a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py index 8eed48b2..4316b1d2 100644 --- a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py @@ -288,6 +288,7 @@ def _fields_missing_data(self, device_data, ip_address, platform): def load_devices(self): """Load devices into the DiffSync store.""" for ip_address in self.device_data: + platform = None # If an excption is 
caught below, the platform must still be set. try: if self.job.debug: self.job.logger.debug(f"loading device data for {ip_address}") From 4b7ef3c340a46a2f273c6e2af6074f5e4e2c1e2c Mon Sep 17 00:00:00 2001 From: David Cates Date: Thu, 7 Mar 2024 22:40:40 -0700 Subject: [PATCH 149/225] yamllint, ruff --- .../command_mappers/juniper_junos.yml | 10 +++++----- nautobot_device_onboarding/jobs.py | 2 +- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/nautobot_device_onboarding/command_mappers/juniper_junos.yml b/nautobot_device_onboarding/command_mappers/juniper_junos.yml index 963d54f9..9478c215 100755 --- a/nautobot_device_onboarding/command_mappers/juniper_junos.yml +++ b/nautobot_device_onboarding/command_mappers/juniper_junos.yml @@ -4,26 +4,26 @@ device_onboarding: commands: - command: "show version | display json" use_textfsm: false - jpath: '"software-information"[]."host-name"[].data' + jpath: "'software-information'[].'host-name'[].data" serial: commands: - command: "show chassis hardware | display json" use_textfsm: false - jpath: '"chassis-inventory"[]."chassis"[]."serial-number"[].data' + jpath: "'chassis-inventory'[].'chassis'[].'serial-number'[].data" device_type: commands: - command: "show chassis hardware | display json" use_textfsm: false - jpath: '"chassis-inventory"[]."chassis"[]."description"[].data' + jpath: "'chassis-inventory'[].'chassis'[].'description'[].data" mgmt_interface: commands: - command: "show interfaces | display json" use_textfsm: false - jpath: '"interface-information"[]."physical-interface"[]."logical-interface"[].{name: name[].data, ip: "address-family"[]."interface-address"[]."ifa-local"[].data}' + jpath: "'interface-information'[].'physical-interface'[].'logical-interface'[].{name: name[].data, ip: 'address-family'[].'interface-address'[].'ifa-local'[].data}" post_processor: "{% for entry in obj %}{% if entry['ip'] is not none %}{% for ipaddr in entry['ip'] %}{% if original_host in ipaddr %}{{ entry['name'] | first 
}}{% endif %}{% endfor %}{% endif %}{% endfor %}" mask_length: commands: - command: "show route protocol direct | display json" use_textfsm: false - jpath: '"route-information"[]."route-table"[]."rt"[]."rt-destination"[].data' + jpath: "'route-information'[].'route-table'[].'rt'[].'rt-destination'[].data" post_processor: "{% for ipaddr in obj %}{% if ipaddr.split('/')[0] == original_host %}{{ ipaddr.split('/')[1] }}{% endif %}{% endfor %}" diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py index 0c27154e..b116d864 100755 --- a/nautobot_device_onboarding/jobs.py +++ b/nautobot_device_onboarding/jobs.py @@ -304,7 +304,7 @@ def _convert_sring_to_bool(self, string, header): ) def _process_csv_data(self, csv_file): - """ "Convert CSV data into a dictionary containing Nautobot objects.""" + """Convert CSV data into a dictionary containing Nautobot objects.""" self.logger.info("Decoding CSV file...") decoded_csv_file = csv_file.read().decode("utf-8") csv_reader = csv.DictReader(StringIO(decoded_csv_file)) From 605cd627e01669aa2d3e5a6262449293397da0a9 Mon Sep 17 00:00:00 2001 From: Jeff Kala Date: Fri, 8 Mar 2024 21:20:32 -0600 Subject: [PATCH 150/225] add more performant lookup to mgmt interface cisco ios --- nautobot_device_onboarding/command_mappers/cisco_ios.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nautobot_device_onboarding/command_mappers/cisco_ios.yml b/nautobot_device_onboarding/command_mappers/cisco_ios.yml index 633386ed..2852004c 100755 --- a/nautobot_device_onboarding/command_mappers/cisco_ios.yml +++ b/nautobot_device_onboarding/command_mappers/cisco_ios.yml @@ -21,7 +21,7 @@ device_onboarding: - command: "show interfaces" use_textfsm: true jpath: "[?ip_address=='{{ obj }}'].{name: interface, enabled: link_status}" - post_processor: "{% for interface in obj %}{% if 'up' in interface['enabled'] %}{{ interface.name }}{% endif %}{% endfor %}" + post_processor: "{{ (obj | selectattr('enabled', 'eq', 
'up') | list | first ).name }}" mask_length: commands: - command: "show interfaces" From 709e0dc8ea5c028b4cb01167c7d797081402bb24 Mon Sep 17 00:00:00 2001 From: David Cates Date: Mon, 11 Mar 2024 09:56:25 -0700 Subject: [PATCH 151/225] bump version, fix bug with DO adapter --- .../diffsync/adapters/onboarding_adapters.py | 6 +++--- pyproject.toml | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py index 4316b1d2..fb456a0a 100644 --- a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py @@ -139,9 +139,6 @@ def load(self): class OnboardingNetworkAdapter(diffsync.DiffSync): """Adapter for loading device data from a network.""" - device_data = None - failed_ip_addresses = [] - manufacturer = onboarding_models.OnboardingManufacturer platform = onboarding_models.OnboardingPlatform device = onboarding_models.OnboardingDevice @@ -154,6 +151,8 @@ def __init__(self, job, sync, *args, **kwargs): super().__init__(*args, **kwargs) self.job = job self.sync = sync + self.device_data = None + self.failed_ip_addresses = [] def _validate_ip_addresses(self, ip_addresses): """Validate the format of each IP Address in a list of IP Addresses.""" @@ -176,6 +175,7 @@ def _handle_failed_devices(self, device_data): If a device fails to return expected data, log the result and remove it from the data to be loaded into the diffsync store. 
""" + self.failed_ip_addresses = [] for ip_address in device_data: if not device_data[ip_address]: self.job.logger.error(f"{ip_address}: Connection or data error, this device will not be synced.") diff --git a/pyproject.toml b/pyproject.toml index bf7e6998..866ed2f5 100755 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "nautobot-device-onboarding" -version = "3.0.2a5" +version = "3.0.2a6" description = "A app for Nautobot to easily onboard new devices." authors = ["Network to Code, LLC "] license = "Apache-2.0" From 62267e75b722b2a749eaea8d413f774a6444b355 Mon Sep 17 00:00:00 2001 From: David Cates Date: Wed, 13 Mar 2024 10:35:48 -0700 Subject: [PATCH 152/225] update DO adapters, models and job --- .../diffsync/adapters/onboarding_adapters.py | 111 ++++++++----- .../diffsync/models/onboarding_models.py | 156 +++++++++++------- nautobot_device_onboarding/jobs.py | 20 ++- 3 files changed, 180 insertions(+), 107 deletions(-) diff --git a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py index fb456a0a..2930f530 100644 --- a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py @@ -77,7 +77,7 @@ def load_platforms(self): name=platform.name, network_driver=platform.network_driver if platform.network_driver else "", manufacturer__name=platform.manufacturer.name if platform.manufacturer else None, - ) # type: ignore + ) self.add(onboarding_platform) if self.job.debug: self.job.logger.debug(f"Platform: {platform.name} loaded.") @@ -104,11 +104,17 @@ def load_devices(self): for device in Device.objects.filter(primary_ip4__host__in=self.job.ip_addresses): interface_list = [] - # Only interfaces with the device's primeary ip should be considered for diff calculations + # Only interfaces with the device's primary ip should be considered for diff calculations + # 
Ultimately, only the first matching interface is used but this list could support multiple + # interface syncs in the future. for interface in device.interfaces.all(): if device.primary_ip4 in interface.ip_addresses.all(): interface_list.append(interface.name) - + if interface_list: + interface_list.sort() + interfaces = [interface_list[0]] + else: + interfaces = [] onboarding_device = self.device( diffsync=self, device_type__model=device.device_type.model, @@ -120,10 +126,10 @@ def load_devices(self): role__name=device.role.name, status__name=device.status.name, secrets_group__name=device.secrets_group.name if device.secrets_group else "", - interfaces=interface_list, + interfaces=interfaces, mask_length=device.primary_ip4.mask_length if device.primary_ip4 else None, serial=device.serial, - ) # type: ignore + ) self.add(onboarding_device) if self.job.debug: self.job.logger.debug(f"Device: {device.name} loaded.") @@ -175,6 +181,7 @@ def _handle_failed_devices(self, device_data): If a device fails to return expected data, log the result and remove it from the data to be loaded into the diffsync store. 
""" + self.device_data = None self.failed_ip_addresses = [] for ip_address in device_data: if not device_data[ip_address]: @@ -212,66 +219,77 @@ def execute_command_getter(self): ) raise ValidationError("Unexpected data returend from CommandGetter.") + def _add_ip_address_to_failed_list(self, ip_address): + """If an a model fails to load, add the ip address to the failed list for logging.""" + if ip_address not in self.failed_ip_addresses: + self.failed_ip_addresses.append(ip_address) + def load_manufacturers(self): """Load manufacturers into the DiffSync store.""" for ip_address in self.device_data: + if self.job.debug: + self.job.logger.debug(f"loading manufacturer data for {ip_address}") + onboarding_manufacturer = None try: - if self.job.debug: - self.job.logger.debug(f"loading manufacturer data for {ip_address}") onboarding_manufacturer = self.manufacturer( diffsync=self, name=self.device_data[ip_address]["manufacturer"], - ) # type: ignore + ) + except KeyError as err: + self.job.logger.error( + f"{ip_address}: Unable to load Manufacturer due to a missing key in returned data, {err.args}" + ) + if onboarding_manufacturer: try: self.add(onboarding_manufacturer) except diffsync.ObjectAlreadyExists: pass - except KeyError as err: - self.job.logger.error( - f"{ip_address}: Unable to load Manufacturer due to missing key in returned data, {err}" - ) def load_platforms(self): """Load platforms into the DiffSync store.""" for ip_address in self.device_data: + if self.job.debug: + self.job.logger.debug(f"loading platform data for {ip_address}") + onboarding_platform = None try: - if self.job.debug: - self.job.logger.debug(f"loading platform data for {ip_address}") onboarding_platform = self.platform( diffsync=self, name=self.device_data[ip_address]["platform"], manufacturer__name=self.device_data[ip_address]["manufacturer"], network_driver=self.device_data[ip_address]["network_driver"], - ) # type: ignore + ) + except KeyError as err: + self.job.logger.error( + 
f"{ip_address}: Unable to load Platform due to a missing key in returned data, {err.args}" + ) + if onboarding_platform: try: self.add(onboarding_platform) except diffsync.ObjectAlreadyExists: pass - except KeyError as err: - self.job.logger.error( - f"{ip_address}: Unable to load Platform due to missing key in returned data, {err}" - ) def load_device_types(self): """Load device types into the DiffSync store.""" for ip_address in self.device_data: + if self.job.debug: + self.job.logger.debug(f"loading device_type data for {ip_address}") + onboarding_device_type = None try: - if self.job.debug: - self.job.logger.debug(f"loading device_type data for {ip_address}") onboarding_device_type = self.device_type( diffsync=self, model=self.device_data[ip_address]["device_type"], part_number=self.device_data[ip_address]["device_type"], manufacturer__name=self.device_data[ip_address]["manufacturer"], - ) # type: ignore + ) + except KeyError as err: + self.job.logger.error( + f"{ip_address}: Unable to load DeviceType due to a missing key in returned data, {err.args}" + ) + if onboarding_device_type: try: self.add(onboarding_device_type) except diffsync.ObjectAlreadyExists: pass - except KeyError as err: - self.job.logger.error( - f"{ip_address}: Unable to load DeviceType due to missing key in returned data, {err}" - ) def _fields_missing_data(self, device_data, ip_address, platform): """Verify that all of the fields returned from a device actually contain data.""" @@ -288,11 +306,11 @@ def _fields_missing_data(self, device_data, ip_address, platform): def load_devices(self): """Load devices into the DiffSync store.""" for ip_address in self.device_data: + if self.job.debug: + self.job.logger.debug(f"loading device data for {ip_address}") platform = None # If an excption is caught below, the platform must still be set. 
+ onboarding_device = None try: - if self.job.debug: - self.job.logger.debug(f"loading device data for {ip_address}") - location = diffsync_utils.retrieve_submitted_value( job=self.job, ip_address=ip_address, query_string="location" ) @@ -326,9 +344,13 @@ def load_devices(self): interfaces=[self.device_data[ip_address]["mgmt_interface"]], mask_length=int(self.device_data[ip_address]["mask_length"]), serial=self.device_data[ip_address]["serial"], - ) # type: ignore + ) except KeyError as err: - self.job.logger.error(f"{ip_address}: Unable to load Device due to missing key in returned data, {err}") + self.job.logger.error( + f"{ip_address}: Unable to load Device due to a missing key in returned data, {err.args}" + ) + if ip_address not in self.failed_ip_addresses: + self.failed_ip_addresses.append(ip_address) except ValueError as err: self.job.logger.error( f"{ip_address}: Unable to load Device due to invalid data type in data return, {err}" @@ -338,22 +360,27 @@ def load_devices(self): device_data=self.device_data, ip_address=ip_address, platform=platform ) if fields_missing_data: - self.failed_ip_addresses.append(ip_address) + onboarding_device = None self.job.logger.error( f"Unable to onbaord {ip_address}, returned data missing for {fields_missing_data}" ) else: - try: - self.add(onboarding_device) + if onboarding_device: + try: + self.add(onboarding_device) + if self.job.debug: + self.job.logger.debug(f"Device: {self.device_data[ip_address]['hostname']} loaded.") + except diffsync.ObjectAlreadyExists: + self.job.logger.error( + f"Device: {self.device_data[ip_address]['hostname']} has already been loaded! " + f"Duplicate devices will not be synced. 
" + f"[Serial Number: {self.device_data[ip_address]['serial']}, " + f"IP Address: {ip_address}]" + ) + else: + self._add_ip_address_to_failed_list(ip_address=ip_address) if self.job.debug: - self.job.logger.debug(f"Device: {self.device_data[ip_address]['hostname']} loaded.") - except diffsync.ObjectAlreadyExists: - self.job.logger.error( - f"Device: {self.device_data[ip_address]['hostname']} has already been loaded! " - f"Duplicate devices will not be synced. " - f"[Serial Number: {self.device_data[ip_address]['serial']}, " - f"IP Address: {ip_address}]" - ) + self.job.logger.debug(f"{ip_address} was added to the failed ip_address list") def load(self): """Load network data.""" diff --git a/nautobot_device_onboarding/diffsync/models/onboarding_models.py b/nautobot_device_onboarding/diffsync/models/onboarding_models.py index ea6fa121..a2a4713c 100644 --- a/nautobot_device_onboarding/diffsync/models/onboarding_models.py +++ b/nautobot_device_onboarding/diffsync/models/onboarding_models.py @@ -7,6 +7,7 @@ from nautobot.apps.choices import InterfaceTypeChoices from nautobot.dcim.models import Device, DeviceType, Interface, Manufacturer, Platform from nautobot.extras.models import Role, SecretsGroup, Status +from nautobot.ipam.models import IPAddressToInterface from nautobot_ssot.contrib import NautobotModel from nautobot_device_onboarding.utils import diffsync_utils @@ -56,7 +57,7 @@ def _get_or_create_device(cls, diffsync, ids, attrs): # Only Devices with a primary ip address are loaded from Nautobot when syncing. # If a device is found in Nautobot with a matching name and location as the # device being created, but the primary ip address doesn't match an ip address entered, - # the matching device will be updated or skipped based on user preference. + # (or doesn't exist) the matching device will be updated or skipped based on user preference. 
location = diffsync_utils.retrieve_submitted_value( job=diffsync.job, ip_address=attrs["primary_ip4__host"], query_string="location" @@ -72,7 +73,8 @@ def _get_or_create_device(cls, diffsync, ids, attrs): diffsync.job.logger.warning( f"Device {device.name} at location {location.name} already exists in Nautobot " "but the primary ip address either does not exist, or doesn't match an entered ip address. " - "This device will be updated." + "This device will be updated. This update may result in multiple IP Address assignments " + "to an interface on the device." ) device = cls._update_device_with_attrs(device, platform, ids, attrs, diffsync) else: @@ -105,34 +107,55 @@ def _get_or_create_device(cls, diffsync, ids, attrs): return device @classmethod - def _get_or_create_interface(cls, diffsync, device, attrs): + def _get_or_create_interface(cls, diffsync, device, ip_address, interface_name): """Attempt to get a Device Interface, create a new one if necessary.""" device_interface = None try: device_interface = Interface.objects.get( - name=attrs["interfaces"][0], + name=interface_name, device=device, ) except ObjectDoesNotExist: try: - device_interface = Interface.objects.create( - name=attrs["interfaces"][0], + device_interface = Interface( + name=interface_name, mgmt_only=diffsync_utils.retrieve_submitted_value( - job=diffsync.job, ip_address=attrs["primary_ip4__host"], query_string="set_mgmt_only" + job=diffsync.job, ip_address=ip_address, query_string="set_mgmt_only" ), status=diffsync_utils.retrieve_submitted_value( - job=diffsync.job, ip_address=attrs["primary_ip4__host"], query_string="interface_status" + job=diffsync.job, ip_address=ip_address, query_string="interface_status" ), type=InterfaceTypeChoices.TYPE_OTHER, device=device, ) - except ValidationError as err: + device_interface.validated_save() + except Exception as err: diffsync.job.logger.error(f"Device Interface could not be created, {err}") return device_interface + @classmethod + def 
_get_or_create_ip_address_to_interface(cls, diffsync, interface, ip_address): + """Attempt to get a Device Interface, create a new one if necessary.""" + interface_assignment = None + try: + interface_assignment = IPAddressToInterface.objects.get( + ip_address=ip_address, + interface=interface, + ) + except ObjectDoesNotExist: + try: + interface_assignment = IPAddressToInterface( + ip_address=ip_address, + interface=interface, + ) + interface_assignment.validated_save() + except Exception as err: + diffsync.job.logger.error(f"{ip_address} failed to assign to assign to interface {err}") + return interface_assignment + @classmethod def _update_device_with_attrs(cls, device, platform, ids, attrs, diffsync): - """Update a Nautobot device instance.""" + """Update a Nautobot device instance with attrs.""" device.location = diffsync_utils.retrieve_submitted_value( job=diffsync.job, ip_address=attrs["primary_ip4__host"], @@ -159,6 +182,28 @@ def _update_device_with_attrs(cls, device, platform, ids, attrs, diffsync): return device + def _remove_old_interface_assignment(self, device, ip_address): + """Remove a device's primary IP address from an interface.""" + try: + old_interface = Interface.objects.get( + device=device, + ip_addresses__in=[ip_address], + ) + old_interface_assignment = IPAddressToInterface.objects.get( + interface=old_interface, + ip_address=ip_address, + ) + old_interface_assignment.delete() + if self.diffsync.job.debug: + self.diffsync.job.logger.debug(f"Interface assignment deleted: {old_interface_assignment}") + except MultipleObjectsReturned: + self.diffsync.job.logger.warning( + f"{ip_address} is assigned to multiple interfaces. The primary IP Address for this " + "device will be assigned to an interface but duplicate assignments will remain." 
+ ) + except ObjectDoesNotExist: + pass + @classmethod def create(cls, diffsync, ids, attrs): """Create a new nautobot device using data scraped from a device.""" @@ -182,10 +227,13 @@ def create(cls, diffsync, ids, attrs): ), job=diffsync.job, ) - interface = cls._get_or_create_interface(diffsync=diffsync, device=device, attrs=attrs) - interface.ip_addresses.add(ip_address) - interface.validated_save() - + interface = cls._get_or_create_interface( + diffsync=diffsync, + device=device, + ip_address=attrs["primary_ip4__host"], + interface_name=attrs["interfaces"][0], + ) + cls._get_or_create_ip_address_to_interface(diffsync=diffsync, ip_address=ip_address, interface=interface) # Assign primary IP Address to Device device.primary_ip4 = ip_address @@ -215,11 +263,8 @@ def update(self, attrs): device.status = Status.objects.get(name=attrs.get("status__name")) if attrs.get("secrets_group__name"): device.secrets_group = SecretsGroup.objects.get(name=attrs.get("secrets_group__name")) - if attrs.get("serial"): - device.primary_ip.serial = attrs.get("serial") if attrs.get("interfaces"): - interface = self._get_or_create_interface(diffsync=self.diffsync, device=device, attrs=attrs) # Update both the interface and primary ip address if attrs.get("primary_ip4__host"): # If the primary ip address is being updated, the mask length must be included @@ -230,72 +275,71 @@ def update(self, attrs): host=attrs["primary_ip4__host"], mask_length=attrs["mask_length"], namespace=diffsync_utils.retrieve_submitted_value( - job=self.job, ip_address=attrs["primary_ip4__host"], query_string="namespace" + job=self.diffsync.job, ip_address=attrs["primary_ip4__host"], query_string="namespace" ), default_ip_status=diffsync_utils.retrieve_submitted_value( - job=self.job, ip_address=attrs["primary_ip4__host"], query_string="ip_address_status" + job=self.diffsync.job, ip_address=attrs["primary_ip4__host"], query_string="ip_address_status" ), 
default_prefix_status=diffsync_utils.retrieve_submitted_value( - job=self.job, ip_address=attrs["primary_ip4__host"], query_string="ip_address_status" + job=self.diffsync.job, ip_address=attrs["primary_ip4__host"], query_string="ip_address_status" ), job=self.diffsync.job, ) - interface.ip_addresses.add(ip_address) - interface.validated_save() - # set the new ip address as the device primary ip address + new_interface = self._get_or_create_interface( + diffsync=self.diffsync, + device=device, + ip_address=attrs["primary_ip4__host"], + interface_name=attrs["interfaces"][0], + ) + self._get_or_create_ip_address_to_interface( + diffsync=self.diffsync, ip_address=ip_address, interface=new_interface + ) device.primary_ip4 = ip_address - interface.validated_save() # Update the interface only else: - # Check for an interface with a matching IP Address and remove it before - # assigning the IP Address to the new interface - try: - old_interface = Interface.objects.get( - device=device, - ip_addresses__in=[device.primary_ip4], - ) - old_interface.ip_addresses.remove(device.primary_ip4) - interface.ip_addresses.add(device.primary_ip4) - interface.validated_save() - except MultipleObjectsReturned: - self.diffsync.job.logger.warning( - f"{device.primary_ip4} is assigned to multiple interfaces. A new " - "interface will be created and assigned this IP Address, but the " - "duplicate assignments will remain." 
- ) - except ObjectDoesNotExist: - interface.ip_addresses.add(device.primary_ip4) - interface.validated_save() + # Remove the primary IP Address from the old managment interface + self._remove_old_interface_assignment(device=device, ip_address=device.primary_ip4) + + new_interface = self._get_or_create_interface( + diffsync=self.diffsync, + device=device, + ip_address=self.primary_ip4__host, + interface_name=attrs["interfaces"][0], + ) + self._get_or_create_ip_address_to_interface( + diffsync=self.diffsync, ip_address=device.primary_ip4, interface=new_interface + ) else: # Update the primary ip address only - - # The OnboardingNautobotAdapter only loads devices with primary ips matching those - # entered for onboarding. This will not be called unless the adapter is changed to - # include all devices if attrs.get("primary_ip4__host"): if not attrs.get("mask_length"): attrs["mask_length"] = device.primary_ip4.mask_length - ip_address = diffsync_utils.get_or_create_ip_address( + new_ip_address = diffsync_utils.get_or_create_ip_address( host=attrs["primary_ip4__host"], mask_length=attrs["mask_length"], namespace=diffsync_utils.retrieve_submitted_value( - job=self.job, ip_address=attrs["primary_ip4__host"], query_string="namespace" + job=self.diffsync.job, ip_address=attrs["primary_ip4__host"], query_string="namespace" ), default_ip_status=diffsync_utils.retrieve_submitted_value( - job=self.job, ip_address=attrs["primary_ip4__host"], query_string="ip_address_status" + job=self.diffsync.job, ip_address=attrs["primary_ip4__host"], query_string="ip_address_status" ), default_prefix_status=diffsync_utils.retrieve_submitted_value( - job=self.job, ip_address=attrs["primary_ip4__host"], query_string="ip_address_status" + job=self.diffsync.job, ip_address=attrs["primary_ip4__host"], query_string="ip_address_status" ), job=self.diffsync.job, ) - interface = Interface.objects.get( - device=device, ip_addresses__in=[device.primary_ip4], name=self.get_attrs()["interfaces"][0] + 
self._remove_old_interface_assignment(device=device, ip_address=device.primary_ip4) + existing_interface = self._get_or_create_interface( + diffsync=self.diffsync, + device=device, + ip_address=new_ip_address, + interface_name=self.get_attrs()["interfaces"][0], + ) + self._get_or_create_ip_address_to_interface( + diffsync=self.diffsync, ip_address=new_ip_address, interface=existing_interface ) - interface.ip_addresses.add(ip_address) - interface.validated_save() - device.primary_ip4 = ip_address + device.primary_ip4 = new_ip_address try: device.validated_save() except ValidationError as err: diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py index b116d864..2cd3aecc 100755 --- a/nautobot_device_onboarding/jobs.py +++ b/nautobot_device_onboarding/jobs.py @@ -204,11 +204,12 @@ def _parse_credentials(self, credentials): class SSOTDeviceOnboarding(DataSource): # pylint: disable=too-many-instance-attributes """Job for syncing basic device info from a network into Nautobot.""" - def __init__(self): + def __init__(self, *args, **kwargs): """Initialize SSOTDeviceOnboarding.""" - super().__init__() + super().__init__(*args, **kwargs) self.processed_csv_data = {} self.task_kwargs_csv_data = {} + self.diffsync_flags = DiffSyncFlags.SKIP_UNMATCHED_DST class Meta: @@ -239,7 +240,7 @@ class Meta: port = IntegerVar(required=False, default=22) timeout = IntegerVar(required=False, default=30) set_mgmt_only = BooleanVar( - default=False, + default=True, label="Set Management Only", description="If True, new interfaces that are created will be set to management only. 
If False, new interfaces will be set to not be management only.", ) @@ -311,6 +312,7 @@ def _process_csv_data(self, csv_file): self.logger.info("Processing CSV data...") processing_failed = False processed_csv_data = {} + self.task_kwargs_csv_data = {} row_count = 1 for row in csv_reader: query = None @@ -432,8 +434,6 @@ def run( if csv_file: self.processed_csv_data = self._process_csv_data(csv_file=csv_file) if self.processed_csv_data: - if self.debug: - self.logger.debug(self.processed_csv_data) # create a list of ip addresses for processing in the adapter self.ip_addresses = [] for ip_address in self.processed_csv_data: @@ -504,21 +504,23 @@ def run( class SSOTNetworkImporter(DataSource): # pylint: disable=too-many-instance-attributes """Job syncing extended device attributes into Nautobot.""" - def __init__(self): + def __init__(self, *args, **kwargs): """Initialize SSOTNetworkImporter.""" - super().__init__() + super().__init__(*args, **kwargs) + self.filtered_devices = None # Queryset of devices based on form inputs - # FOR TESTING ONLY # + # FOR TESTING ONLY, REMOVE WHEN NOT TESTING # from nautobot_device_onboarding.diffsync import mock_data # from nautobot_device_onboarding.utils import diffsync_utils # self.command_getter_result = mock_data.network_importer_mock_data # self.devices_to_load = diffsync_utils.generate_device_queryset_from_command_getter_result(mock_data.network_importer_mock_data) - # REMOVE WHEN NOT TESTING # + # FOR TESTING ONLY, REMOVE WHEN NOT TESTING # RESTORE THESE LINES WHEN NOT TESTING! # self.command_getter_result = None # Dict result from CommandGetter job self.devices_to_load = None # Queryset consisting of devices that responded + # RESTORE THESE LINES WHEN NOT TESTING! 
# class Meta: """Metadata about this Job.""" From b2b8274d9694f3cd351bdd4e31ad6a1c75012796 Mon Sep 17 00:00:00 2001 From: David Cates Date: Wed, 13 Mar 2024 11:54:52 -0700 Subject: [PATCH 153/225] bump version --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 866ed2f5..bca6eff6 100755 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "nautobot-device-onboarding" -version = "3.0.2a6" +version = "3.0.2a7" description = "A app for Nautobot to easily onboard new devices." authors = ["Network to Code, LLC "] license = "Apache-2.0" From e5f6e520154e120640d6cd52534e468c73f3d990 Mon Sep 17 00:00:00 2001 From: Susan Hooks Date: Thu, 14 Mar 2024 22:03:03 +0000 Subject: [PATCH 154/225] formatting changes --- .../command_mappers/cisco_ios.yml | 77 ++++++++++--------- .../command_mappers/juniper_junos.yml | 8 +- nautobot_device_onboarding/constants.py | 8 ++ nautobot_device_onboarding/jobs.py | 50 ++++++++++++ .../nornir_plays/processor.py | 1 + nautobot_device_onboarding/utils/formatter.py | 13 +++- 6 files changed, 118 insertions(+), 39 deletions(-) diff --git a/nautobot_device_onboarding/command_mappers/cisco_ios.yml b/nautobot_device_onboarding/command_mappers/cisco_ios.yml index 2852004c..600f15d1 100755 --- a/nautobot_device_onboarding/command_mappers/cisco_ios.yml +++ b/nautobot_device_onboarding/command_mappers/cisco_ios.yml @@ -29,27 +29,32 @@ device_onboarding: jpath: "[?ip_address=='{{ obj }}'].prefix_length" post_processor: "{{ obj | unique | first }}" network_importer: - serial: - commands: - - command: "show version" - use_textfsm: true - jpath: "[*].serial[0]" - interfaces: - interfaces: - commands: - - command: "show interfaces" - use_textfsm: true - jpath: "[*].{interface: interface}" + # serial: + # commands: + # - command: "show version" + # use_textfsm: true + # jpath: "[*].serial[0]" + # interfaces: + # interfaces: + # commands: + # - command: "show 
interfaces" + # use_textfsm: true + # jpath: "[*].{interface: interface}" type: commands: - command: "show interfaces" use_textfsm: true - jpath: "[*].{interface: interface, hardware_type: hardware_type}" + jpath: "[*].{interface: interface, type: hardware_type}" ip_addresses: commands: - command: "show interfaces" use_textfsm: true jpath: "[*].{interface: interface, ip_address: ip_address}" + prefix_length: + commands: + - command: "show interfaces" + use_textfsm: true + jpath: "[*].{interface: interface, prefix_length: prefix_length}" mtu: commands: - command: "show interfaces" @@ -70,27 +75,27 @@ network_importer: - command: "show interfaces" use_textfsm: true jpath: "[*].{interface: interface, link_status: link_status}" - dot1q_mode: - commands: - - command: "show interfaces switchport" - use_textfsm: true - jpath: "[*].mode" - - command: "show interfaces" - use_textfsm: true - jpath: "[*].{interface: interface, encapsulation: encapsulation}" - validator_pattern: "not None" - lag: - commands: - - command: "show etherchannel summary" - use_textfsm: true - jpath: "[*].protocol" - untagged_vlan: - commands: - - command: "show vlans" - use_textfsm: true - jpath: "[*].vlan_id" - tagged_vlans: - commands: - - command: "show vlans" - use_textfsm: true - jpath: "[*].vlan_id" + # dot1q_mode: + # commands: + # # - command: "show interfaces switchport" + # # use_textfsm: true + # # jpath: "[*].mode" + # - command: "show interfaces switchport" + # use_textfsm: true + # jpath: "[*].{interface: interface, dot1q_mode: admin_mode}" + # validator_pattern: "not None" + # # lag: + # # commands: + # # - command: "show etherchannel summary" + # # use_textfsm: true + # # jpath: "[*].protocol" + # # untagged_vlan: + # # commands: + # # - command: "show vlans" + # # use_textfsm: true + # # jpath: "[*].vlan_id" + # # tagged_vlans: + # # commands: + # # - command: "show vlans" + # # use_textfsm: true + # # jpath: "[*].vlan_id" diff --git 
a/nautobot_device_onboarding/command_mappers/juniper_junos.yml b/nautobot_device_onboarding/command_mappers/juniper_junos.yml index 9478c215..6c3748e3 100755 --- a/nautobot_device_onboarding/command_mappers/juniper_junos.yml +++ b/nautobot_device_onboarding/command_mappers/juniper_junos.yml @@ -17,13 +17,17 @@ device_onboarding: jpath: "'chassis-inventory'[].'chassis'[].'description'[].data" mgmt_interface: commands: - - command: "show interfaces | display json" + - command: "show interfaces terse | display json" use_textfsm: false jpath: "'interface-information'[].'physical-interface'[].'logical-interface'[].{name: name[].data, ip: 'address-family'[].'interface-address'[].'ifa-local'[].data}" post_processor: "{% for entry in obj %}{% if entry['ip'] is not none %}{% for ipaddr in entry['ip'] %}{% if original_host in ipaddr %}{{ entry['name'] | first }}{% endif %}{% endfor %}{% endif %}{% endfor %}" mask_length: commands: - - command: "show route protocol direct | display json" + # - command: "show route protocol direct | display json" + # use_textfsm: false + # jpath: '"route-information"[]."route-table"[]."rt"[]."rt-destination"[].data || [`31`]' + # post_processor: "{% for ipaddr in obj %}{% if '/' in ipaddr and ipaddr.split('/')[0] == original_host %}{{ ipaddr.split('/')[1] }}{% endif %}{% endfor %}" + - command: "show interfaces terse | display json" use_textfsm: false jpath: "'route-information'[].'route-table'[].'rt'[].'rt-destination'[].data" post_processor: "{% for ipaddr in obj %}{% if ipaddr.split('/')[0] == original_host %}{{ ipaddr.split('/')[1] }}{% endif %}{% endfor %}" diff --git a/nautobot_device_onboarding/constants.py b/nautobot_device_onboarding/constants.py index caccc432..747468b2 100644 --- a/nautobot_device_onboarding/constants.py +++ b/nautobot_device_onboarding/constants.py @@ -8,3 +8,11 @@ "juniper_junos": "junos", "cisco_xr": "iosxr", } + +INTERFACE_TYPE_MAP_STATIC = { + "Gigabit Ethernet": "1000base-t", + "Ten Gigabit Ethernet": 
"10gbase-t", + "Forty Gigabit Ethernet": "40gbase-t", + "Ethernet SVI": "virtual", + "EthernetChannel": "lag", +} diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py index 2cd3aecc..de0e6c23 100755 --- a/nautobot_device_onboarding/jobs.py +++ b/nautobot_device_onboarding/jobs.py @@ -24,10 +24,14 @@ OnboardingNautobotAdapter, OnboardingNetworkAdapter, ) + + from nautobot_device_onboarding.exceptions import OnboardException from nautobot_device_onboarding.helpers import onboarding_task_fqdn_to_ip from nautobot_device_onboarding.netdev_keeper import NetdevKeeper +from nautobot_device_onboarding.utils.formatter import map_interface_type from nautobot_device_onboarding.nornir_plays.command_getter import command_getter_do, command_getter_ni +from netutils.interface import canonical_interface_name PLUGIN_SETTINGS = settings.PLUGINS_CONFIG["nautobot_device_onboarding"] @@ -688,6 +692,52 @@ class Meta: def run(self, *args, **kwargs): """Run command getter.""" compiled_results = command_getter_ni(self.job_result, self.logger.getEffectiveLevel(), kwargs) + for device, device_data in compiled_results.items(): + self.logger.info(f"Device Data: {device_data}") + serial = Device.objects.get(name=device).serial + self.logger.info(f"Serial: {serial}") + mtu_list = device_data.get("mtu", []) + type_list = device_data.get("type", []) + ip_list = device_data.get("ip_addresses", []) + prefix_list = device_data.get("prefix_length", []) + mac_list = device_data.get("mac_address", []) + description_list = device_data.get("description", []) + link_status_list = device_data.get("link_status", []) + self.logger.info(f"IP List {ip_list}") + self.logger.info(f"Prefix List {prefix_list}") + interface_dict = {} + for item in mtu_list: + interface_dict.setdefault(item["interface"], {})["mtu"] = item["mtu"] + for item in type_list: + interface_type = map_interface_type(item["type"]) + interface_dict.setdefault(item["interface"], {})["type"] = interface_type + for 
item in ip_list: + interface_dict.setdefault(item["interface"], {})["ip_addresses"] = {"host": item["ip_address"]} + self.logger.info(f"Interface Dict {interface_dict}") + for item in prefix_list: + interface_dict.setdefault(item["interface"], {}).setdefault("ip_addresses", {})["prefix_length"] = item[ + "prefix_length" + ] + for item in mac_list: + interface_dict.setdefault(item["interface"], {})["mac_address"] = item["mac_address"] + for item in description_list: + interface_dict.setdefault(item["interface"], {})["description"] = item["description"] + for item in link_status_list: + interface_dict.setdefault(item["interface"], {})["enabled"] = ( + True if item["link_status"] == "up" else False + ) + + device_data["interfaces"] = interface_dict + device_data["serial"] = serial + + del device_data["mtu"] + del device_data["type"] + del device_data["ip_addresses"] + del device_data["prefix_length"] + del device_data["mac_address"] + del device_data["description"] + del device_data["link_status"] + return compiled_results diff --git a/nautobot_device_onboarding/nornir_plays/processor.py b/nautobot_device_onboarding/nornir_plays/processor.py index 8392b79b..1e03940e 100755 --- a/nautobot_device_onboarding/nornir_plays/processor.py +++ b/nautobot_device_onboarding/nornir_plays/processor.py @@ -77,6 +77,7 @@ def subtask_instance_completed(self, task: Task, host: Host, result: MultiResult # if not result.failed: formatted_data = extract_show_data(host, result, task.parent_task.params["command_getter_job"]) # revist should be able to just update self.data with full formatted_data + self.logger.info(f"Formatted Data: {formatted_data}", extra={"object": task.host}) for k, v in formatted_data.items(): self.data[host.name][k] = v diff --git a/nautobot_device_onboarding/utils/formatter.py b/nautobot_device_onboarding/utils/formatter.py index c27167e0..184eeba8 100755 --- a/nautobot_device_onboarding/utils/formatter.py +++ b/nautobot_device_onboarding/utils/formatter.py @@ -8,7 
+8,7 @@ from django.utils.module_loading import import_string from jdiff import extract_data_from_json from jinja2.sandbox import SandboxedEnvironment - +from nautobot_device_onboarding.constants import INTERFACE_TYPE_MAP_STATIC from nautobot_device_onboarding.utils.jinja_filters import fix_interfaces DATA_DIR = os.path.abspath(os.path.join(os.path.dirname(os.path.dirname(__file__)), "command_mappers")) @@ -58,15 +58,19 @@ def perform_data_extraction(host, dict_field, command_info_dict, j2_env, task_re if isinstance(task_result.result, str): try: result_to_json = json.loads(task_result.result) + print("result_to_json_1: ", result_to_json) extracted_value = extract_data_from_json(result_to_json, j2_rendered_jpath) except json.decoder.JSONDecodeError: extracted_value = None else: + print(f"result_to_json_2: {task_result.result}") extracted_value = extract_data_from_json(task_result.result, j2_rendered_jpath) if show_command.get("post_processor"): template = j2_env.from_string(show_command["post_processor"]) + print(f"extracted_value_2: {extracted_value}") extracted_processed = template.render({"obj": extracted_value, "original_host": host.name}) else: + print(f"extracted_value_3: {extracted_value}") extracted_processed = extracted_value if isinstance(extracted_value, list) and len(extracted_value) == 1: extracted_processed = extracted_value[0] @@ -102,11 +106,18 @@ def extract_show_data(host, multi_result, command_getter_type): for default_dict_field, command_info in command_jpaths[command_getter_type].items(): if command_info.get("commands"): # Means their isn't any "nested" structures. Therefore not expected to see "validator_pattern key" + print(f"default dict field: {default_dict_field}") result = perform_data_extraction(host, default_dict_field, command_info, jinja_env, multi_result[0]) final_result_dict.update(result) else: # Means their is a "nested" structures. 
Priority for dict_field, nested_command_info in command_info.items(): + print(f"default dict field: {default_dict_field}") result = perform_data_extraction(host, dict_field, nested_command_info, jinja_env, multi_result[0]) final_result_dict.update(result) return final_result_dict + + +def map_interface_type(interface_type): + "Map interface type to a Nautobot type." + return INTERFACE_TYPE_MAP_STATIC.get(interface_type, "other") From a4ceb87d27867e56d1a781eb60af0f4a11f2abe7 Mon Sep 17 00:00:00 2001 From: Susan Hooks Date: Thu, 14 Mar 2024 22:28:14 +0000 Subject: [PATCH 155/225] changed ip address to list --- nautobot_device_onboarding/jobs.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py index de0e6c23..c94c093b 100755 --- a/nautobot_device_onboarding/jobs.py +++ b/nautobot_device_onboarding/jobs.py @@ -727,6 +727,11 @@ def run(self, *args, **kwargs): True if item["link_status"] == "up" else False ) + for interface, data in interface_dict.items(): + ip_addresses = data.get("ip_addresses", {}) + if ip_addresses: + data["ip_addresses"] = [ip_addresses] + device_data["interfaces"] = interface_dict device_data["serial"] = serial From 44b4e1619ef4ed430671ed44550765bbe0086856 Mon Sep 17 00:00:00 2001 From: Jeff Kala Date: Fri, 15 Mar 2024 16:20:07 -0500 Subject: [PATCH 156/225] fix yamllint, pylint and juniper yaml jpaths --- .github/workflows/ci.yml | 6 +- .../command_mappers/cisco_ios.yml | 118 +++++++++--------- .../command_mappers/juniper_junos.yml | 12 +- nautobot_device_onboarding/jobs.py | 10 +- nautobot_device_onboarding/utils/formatter.py | 2 +- 5 files changed, 72 insertions(+), 76 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 43c3be15..287aef58 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -193,9 +193,9 @@ jobs: - python-version: "3.11" db-backend: "postgresql" nautobot-version: "2.1.1" - # - python-version: "3.11" - 
# db-backend: "mysql" - # nautobot-version: "stable" + # - python-version: "3.11" + # db-backend: "mysql" + # nautobot-version: "stable" runs-on: "ubuntu-22.04" env: INVOKE_NAUTOBOT_DEVICE_ONBOARDING_PYTHON_VER: "${{ matrix.python-version }}" diff --git a/nautobot_device_onboarding/command_mappers/cisco_ios.yml b/nautobot_device_onboarding/command_mappers/cisco_ios.yml index 600f15d1..9dcb922d 100755 --- a/nautobot_device_onboarding/command_mappers/cisco_ios.yml +++ b/nautobot_device_onboarding/command_mappers/cisco_ios.yml @@ -40,62 +40,62 @@ network_importer: # - command: "show interfaces" # use_textfsm: true # jpath: "[*].{interface: interface}" - type: - commands: - - command: "show interfaces" - use_textfsm: true - jpath: "[*].{interface: interface, type: hardware_type}" - ip_addresses: - commands: - - command: "show interfaces" - use_textfsm: true - jpath: "[*].{interface: interface, ip_address: ip_address}" - prefix_length: - commands: - - command: "show interfaces" - use_textfsm: true - jpath: "[*].{interface: interface, prefix_length: prefix_length}" - mtu: - commands: - - command: "show interfaces" - use_textfsm: true - jpath: "[*].{interface: interface, mtu: mtu}" - mac_address: - commands: - - command: "show interfaces" - use_textfsm: true - jpath: "[*].{interface: interface, mac_address: mac_address}" - description: - commands: - - command: "show interfaces" - use_textfsm: true - jpath: "[*].{interface: interface, description: description}" - link_status: - commands: - - command: "show interfaces" - use_textfsm: true - jpath: "[*].{interface: interface, link_status: link_status}" - # dot1q_mode: - # commands: - # # - command: "show interfaces switchport" - # # use_textfsm: true - # # jpath: "[*].mode" - # - command: "show interfaces switchport" - # use_textfsm: true - # jpath: "[*].{interface: interface, dot1q_mode: admin_mode}" - # validator_pattern: "not None" - # # lag: - # # commands: - # # - command: "show etherchannel summary" - # # use_textfsm: 
true - # # jpath: "[*].protocol" - # # untagged_vlan: - # # commands: - # # - command: "show vlans" - # # use_textfsm: true - # # jpath: "[*].vlan_id" - # # tagged_vlans: - # # commands: - # # - command: "show vlans" - # # use_textfsm: true - # # jpath: "[*].vlan_id" + type: + commands: + - command: "show interfaces" + use_textfsm: true + jpath: "[*].{interface: interface, type: hardware_type}" + ip_addresses: + commands: + - command: "show interfaces" + use_textfsm: true + jpath: "[*].{interface: interface, ip_address: ip_address}" + prefix_length: + commands: + - command: "show interfaces" + use_textfsm: true + jpath: "[*].{interface: interface, prefix_length: prefix_length}" + mtu: + commands: + - command: "show interfaces" + use_textfsm: true + jpath: "[*].{interface: interface, mtu: mtu}" + mac_address: + commands: + - command: "show interfaces" + use_textfsm: true + jpath: "[*].{interface: interface, mac_address: mac_address}" + description: + commands: + - command: "show interfaces" + use_textfsm: true + jpath: "[*].{interface: interface, description: description}" + link_status: + commands: + - command: "show interfaces" + use_textfsm: true + jpath: "[*].{interface: interface, link_status: link_status}" + # dot1q_mode: + # commands: + # # - command: "show interfaces switchport" + # # use_textfsm: true + # # jpath: "[*].mode" + # - command: "show interfaces switchport" + # use_textfsm: true + # jpath: "[*].{interface: interface, dot1q_mode: admin_mode}" + # validator_pattern: "not None" + # # lag: + # # commands: + # # - command: "show etherchannel summary" + # # use_textfsm: true + # # jpath: "[*].protocol" + # # untagged_vlan: + # # commands: + # # - command: "show vlans" + # # use_textfsm: true + # # jpath: "[*].vlan_id" + # # tagged_vlans: + # # commands: + # # - command: "show vlans" + # # use_textfsm: true + # # jpath: "[*].vlan_id" diff --git a/nautobot_device_onboarding/command_mappers/juniper_junos.yml 
b/nautobot_device_onboarding/command_mappers/juniper_junos.yml index 6c3748e3..9770c091 100755 --- a/nautobot_device_onboarding/command_mappers/juniper_junos.yml +++ b/nautobot_device_onboarding/command_mappers/juniper_junos.yml @@ -4,30 +4,30 @@ device_onboarding: commands: - command: "show version | display json" use_textfsm: false - jpath: "'software-information'[].'host-name'[].data" + jpath: '"software-information"[]."host-name"[].data' # yamllint disable-line rule:quoted-strings serial: commands: - command: "show chassis hardware | display json" use_textfsm: false - jpath: "'chassis-inventory'[].'chassis'[].'serial-number'[].data" + jpath: '"chassis-inventory"[]."chassis"[]."serial-number"[].data' # yamllint disable-line rule:quoted-strings device_type: commands: - command: "show chassis hardware | display json" use_textfsm: false - jpath: "'chassis-inventory'[].'chassis'[].'description'[].data" + jpath: '"chassis-inventory"[]."chassis"[]."description"[].data' # yamllint disable-line rule:quoted-strings mgmt_interface: commands: - command: "show interfaces terse | display json" use_textfsm: false - jpath: "'interface-information'[].'physical-interface'[].'logical-interface'[].{name: name[].data, ip: 'address-family'[].'interface-address'[].'ifa-local'[].data}" + jpath: '"interface-information"[]."physical-interface"[]."logical-interface"[].{name: name[].data, ip: "address-family"[]."interface-address"[]."ifa-local"[].data}' # yamllint disable-line rule:quoted-strings post_processor: "{% for entry in obj %}{% if entry['ip'] is not none %}{% for ipaddr in entry['ip'] %}{% if original_host in ipaddr %}{{ entry['name'] | first }}{% endif %}{% endfor %}{% endif %}{% endfor %}" mask_length: commands: # - command: "show route protocol direct | display json" # use_textfsm: false - # jpath: '"route-information"[]."route-table"[]."rt"[]."rt-destination"[].data || [`31`]' + # jpath: ""route-information"[]."route-table"[]."rt"[]."rt-destination"[].data || [`31`]" # 
post_processor: "{% for ipaddr in obj %}{% if '/' in ipaddr and ipaddr.split('/')[0] == original_host %}{{ ipaddr.split('/')[1] }}{% endif %}{% endfor %}" - command: "show interfaces terse | display json" use_textfsm: false - jpath: "'route-information'[].'route-table'[].'rt'[].'rt-destination'[].data" + jpath: '"route-information"[]."route-table"[]."rt"[]."rt-destination"[].data' # yamllint disable-line rule:quoted-strings post_processor: "{% for ipaddr in obj %}{% if ipaddr.split('/')[0] == original_host %}{{ ipaddr.split('/')[1] }}{% endif %}{% endfor %}" diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py index c94c093b..b5e7183b 100755 --- a/nautobot_device_onboarding/jobs.py +++ b/nautobot_device_onboarding/jobs.py @@ -31,7 +31,6 @@ from nautobot_device_onboarding.netdev_keeper import NetdevKeeper from nautobot_device_onboarding.utils.formatter import map_interface_type from nautobot_device_onboarding.nornir_plays.command_getter import command_getter_do, command_getter_ni -from netutils.interface import canonical_interface_name PLUGIN_SETTINGS = settings.PLUGINS_CONFIG["nautobot_device_onboarding"] @@ -723,15 +722,12 @@ def run(self, *args, **kwargs): for item in description_list: interface_dict.setdefault(item["interface"], {})["description"] = item["description"] for item in link_status_list: - interface_dict.setdefault(item["interface"], {})["enabled"] = ( - True if item["link_status"] == "up" else False - ) - - for interface, data in interface_dict.items(): + interface_dict.setdefault(item["interface"], {})["enabled"] = item["link_status"] == "up" + for _, data in interface_dict.items(): ip_addresses = data.get("ip_addresses", {}) if ip_addresses: data["ip_addresses"] = [ip_addresses] - + device_data["interfaces"] = interface_dict device_data["serial"] = serial diff --git a/nautobot_device_onboarding/utils/formatter.py b/nautobot_device_onboarding/utils/formatter.py index 184eeba8..e7816fb3 100755 --- 
a/nautobot_device_onboarding/utils/formatter.py +++ b/nautobot_device_onboarding/utils/formatter.py @@ -119,5 +119,5 @@ def extract_show_data(host, multi_result, command_getter_type): def map_interface_type(interface_type): - "Map interface type to a Nautobot type." + """Map interface type to a Nautobot type.""" return INTERFACE_TYPE_MAP_STATIC.get(interface_type, "other") From c553acf244a08d10ef370ea5ffa356608facbc71 Mon Sep 17 00:00:00 2001 From: Susan Hooks Date: Fri, 15 Mar 2024 21:55:16 +0000 Subject: [PATCH 157/225] updates to NI formatting --- .../adapters/network_importer_adapters.py | 14 ++-- nautobot_device_onboarding/jobs.py | 54 +------------ .../nornir_plays/command_getter.py | 8 +- nautobot_device_onboarding/utils/formatter.py | 75 +++++++++++++++++-- 4 files changed, 85 insertions(+), 66 deletions(-) diff --git a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py index eccaf579..eef0b7fe 100644 --- a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py @@ -62,7 +62,7 @@ def load_ip_addresses(self): """ ip_address_hosts = set() for _, device_data in self.job.command_getter_result.items(): - for interface in json.loads(device_data["interfaces"]): + for interface in device_data["interfaces"]: for _, interface_data in interface.items(): for ip_address in interface_data["ip_addresses"]: if ip_address: @@ -262,7 +262,7 @@ def load_devices(self): self.add(network_device) if self.job.debug: self.job.logger.debug(f"Device {network_device} loaded.") - for interface in json.loads(device_data["interfaces"]): + for interface in device_data["interfaces"]: for interface_name, interface_data in interface.items(): network_interface = self.load_interface(hostname, interface_name, interface_data) network_device.add_child(network_interface) @@ -292,7 +292,7 @@ def 
load_interface(self, hostname, interface_name, interface_data): def load_ip_addresses(self): """Load IP addresses into the DiffSync store.""" for hostname, device_data in self.job.command_getter_result.items(): # pylint: disable=too-many-nested-blocks - for interface in json.loads(device_data["interfaces"]): + for interface in device_data["interfaces"]: for interface_name, interface_data in interface.items(): for ip_address in interface_data["ip_addresses"]: if ip_address["ip_address"]: # the ip_address and mask_length may be empty, skip these @@ -323,7 +323,7 @@ def load_vlans(self): location_names[device.name] = device.location.name for hostname, device_data in self.job.command_getter_result.items(): # pylint: disable=too-many-nested-blocks - for interface in json.loads(device_data["interfaces"]): + for interface in device_data["interfaces"]: for _, interface_data in interface.items(): # add tagged vlans for tagged_vlan in interface_data["tagged_vlans"]: @@ -357,7 +357,7 @@ def load_vlans(self): def load_ip_address_to_interfaces(self): """Load ip address interface assignments into the Diffsync store.""" for hostname, device_data in self.job.command_getter_result.items(): # pylint: disable=too-many-nested-blocks - for interface in json.loads(device_data["interfaces"]): + for interface in device_data["interfaces"]: for interface_name, interface_data in interface.items(): for ip_address in interface_data["ip_addresses"]: if ip_address["ip_address"]: # the ip_address and mask_length may be empty, skip these @@ -379,7 +379,7 @@ def load_ip_address_to_interfaces(self): def load_tagged_vlans_to_interface(self): """Load tagged vlan to interface assignments into the Diffsync store.""" for hostname, device_data in self.job.command_getter_result.items(): - for interface in json.loads(device_data["interfaces"]): + for interface in device_data["interfaces"]: for interface_name, interface_data in interface.items(): network_tagged_vlans_to_interface = 
self.tagged_vlans_to_interface( diffsync=self, @@ -394,7 +394,7 @@ def load_tagged_vlans_to_interface(self): def load_lag_to_interface(self): """Load lag interface assignments into the Diffsync store.""" for hostname, device_data in self.job.command_getter_result.items(): - for interface in json.loads(device_data["interfaces"]): + for interface in device_data["interfaces"]: for interface_name, interface_data in interface.items(): network_lag_to_interface = self.lag_to_interface( diffsync=self, diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py index c94c093b..cdef75b8 100755 --- a/nautobot_device_onboarding/jobs.py +++ b/nautobot_device_onboarding/jobs.py @@ -523,6 +523,7 @@ def __init__(self, *args, **kwargs): # RESTORE THESE LINES WHEN NOT TESTING! # self.command_getter_result = None # Dict result from CommandGetter job + self.logger.info(f"Command Getter Result: {self.command_getter_result}") self.devices_to_load = None # Queryset consisting of devices that responded # RESTORE THESE LINES WHEN NOT TESTING! # @@ -536,7 +537,7 @@ class Meta: ) debug = BooleanVar(description="Enable for more verbose logging.") - sync_vlans = BooleanVar(default=True, description="Sync VLANs and interface VLAN assignments.") + sync_vlans = BooleanVar(default=False, description="Sync VLANs and interface VLAN assignments.") namespace = ObjectVar( model=Namespace, required=True, description="The namespace for all IP addresses created or updated in the sync." 
) @@ -692,57 +693,6 @@ class Meta: def run(self, *args, **kwargs): """Run command getter.""" compiled_results = command_getter_ni(self.job_result, self.logger.getEffectiveLevel(), kwargs) - for device, device_data in compiled_results.items(): - self.logger.info(f"Device Data: {device_data}") - serial = Device.objects.get(name=device).serial - self.logger.info(f"Serial: {serial}") - mtu_list = device_data.get("mtu", []) - type_list = device_data.get("type", []) - ip_list = device_data.get("ip_addresses", []) - prefix_list = device_data.get("prefix_length", []) - mac_list = device_data.get("mac_address", []) - description_list = device_data.get("description", []) - link_status_list = device_data.get("link_status", []) - self.logger.info(f"IP List {ip_list}") - self.logger.info(f"Prefix List {prefix_list}") - interface_dict = {} - for item in mtu_list: - interface_dict.setdefault(item["interface"], {})["mtu"] = item["mtu"] - for item in type_list: - interface_type = map_interface_type(item["type"]) - interface_dict.setdefault(item["interface"], {})["type"] = interface_type - for item in ip_list: - interface_dict.setdefault(item["interface"], {})["ip_addresses"] = {"host": item["ip_address"]} - self.logger.info(f"Interface Dict {interface_dict}") - for item in prefix_list: - interface_dict.setdefault(item["interface"], {}).setdefault("ip_addresses", {})["prefix_length"] = item[ - "prefix_length" - ] - for item in mac_list: - interface_dict.setdefault(item["interface"], {})["mac_address"] = item["mac_address"] - for item in description_list: - interface_dict.setdefault(item["interface"], {})["description"] = item["description"] - for item in link_status_list: - interface_dict.setdefault(item["interface"], {})["enabled"] = ( - True if item["link_status"] == "up" else False - ) - - for interface, data in interface_dict.items(): - ip_addresses = data.get("ip_addresses", {}) - if ip_addresses: - data["ip_addresses"] = [ip_addresses] - - device_data["interfaces"] = 
interface_dict - device_data["serial"] = serial - - del device_data["mtu"] - del device_data["type"] - del device_data["ip_addresses"] - del device_data["prefix_length"] - del device_data["mac_address"] - del device_data["description"] - del device_data["link_status"] - return compiled_results diff --git a/nautobot_device_onboarding/nornir_plays/command_getter.py b/nautobot_device_onboarding/nornir_plays/command_getter.py index 0c82694c..3244cb5e 100755 --- a/nautobot_device_onboarding/nornir_plays/command_getter.py +++ b/nautobot_device_onboarding/nornir_plays/command_getter.py @@ -2,7 +2,7 @@ # pylint: disable=relative-beyond-top-level from django.conf import settings -from nautobot.dcim.models import Platform +from nautobot.dcim.models import Platform, Device from nautobot.extras.choices import SecretsGroupAccessTypeChoices, SecretsGroupSecretTypeChoices from nautobot.extras.models import SecretsGroup from nautobot_plugin_nornir.constants import NORNIR_SETTINGS @@ -19,6 +19,7 @@ from nautobot_device_onboarding.nornir_plays.processor import ProcessorDO from nautobot_device_onboarding.utils.helper import add_platform_parsing_info from nautobot_device_onboarding.utils.inventory_creator import _set_inventory +from nautobot_device_onboarding.utils.formatter import map_interface_type, format_ios_results InventoryPluginRegister.register("nautobot-inventory", NautobotORMInventory) InventoryPluginRegister.register("empty-inventory", EmptyInventory) @@ -207,4 +208,9 @@ def command_getter_ni(job_result, log_level, kwargs): except Exception as err: # pylint: disable=broad-exception-caught logger.info("Error: %s", err) return err + + compiled_results = format_ios_results(compiled_results) + return compiled_results + + diff --git a/nautobot_device_onboarding/utils/formatter.py b/nautobot_device_onboarding/utils/formatter.py index 184eeba8..74bf72a1 100755 --- a/nautobot_device_onboarding/utils/formatter.py +++ b/nautobot_device_onboarding/utils/formatter.py @@ -10,6 +10,7 @@ 
from jinja2.sandbox import SandboxedEnvironment from nautobot_device_onboarding.constants import INTERFACE_TYPE_MAP_STATIC from nautobot_device_onboarding.utils.jinja_filters import fix_interfaces +from nautobot.dcim.models import Device DATA_DIR = os.path.abspath(os.path.join(os.path.dirname(os.path.dirname(__file__)), "command_mappers")) @@ -58,19 +59,15 @@ def perform_data_extraction(host, dict_field, command_info_dict, j2_env, task_re if isinstance(task_result.result, str): try: result_to_json = json.loads(task_result.result) - print("result_to_json_1: ", result_to_json) extracted_value = extract_data_from_json(result_to_json, j2_rendered_jpath) except json.decoder.JSONDecodeError: extracted_value = None else: - print(f"result_to_json_2: {task_result.result}") extracted_value = extract_data_from_json(task_result.result, j2_rendered_jpath) if show_command.get("post_processor"): template = j2_env.from_string(show_command["post_processor"]) - print(f"extracted_value_2: {extracted_value}") extracted_processed = template.render({"obj": extracted_value, "original_host": host.name}) else: - print(f"extracted_value_3: {extracted_value}") extracted_processed = extracted_value if isinstance(extracted_value, list) and len(extracted_value) == 1: extracted_processed = extracted_value[0] @@ -106,13 +103,11 @@ def extract_show_data(host, multi_result, command_getter_type): for default_dict_field, command_info in command_jpaths[command_getter_type].items(): if command_info.get("commands"): # Means their isn't any "nested" structures. Therefore not expected to see "validator_pattern key" - print(f"default dict field: {default_dict_field}") result = perform_data_extraction(host, default_dict_field, command_info, jinja_env, multi_result[0]) final_result_dict.update(result) else: # Means their is a "nested" structures. 
Priority for dict_field, nested_command_info in command_info.items(): - print(f"default dict field: {default_dict_field}") result = perform_data_extraction(host, dict_field, nested_command_info, jinja_env, multi_result[0]) final_result_dict.update(result) return final_result_dict @@ -121,3 +116,71 @@ def extract_show_data(host, multi_result, command_getter_type): def map_interface_type(interface_type): "Map interface type to a Nautobot type." return INTERFACE_TYPE_MAP_STATIC.get(interface_type, "other") + +def format_ios_results(compiled_results): + """Format the results of the show commands for IOS devices. + + Args: + compiled_results (dict): The compiled results from the Nornir task. + + Returns: + dict: The formatted results. + """ + for device, device_data in compiled_results.items(): + serial = Device.objects.get(name=device).serial + mtu_list = device_data.get("mtu", []) + type_list = device_data.get("type", []) + ip_list = device_data.get("ip_addresses", []) + prefix_list = device_data.get("prefix_length", []) + mac_list = device_data.get("mac_address", []) + description_list = device_data.get("description", []) + link_status_list = device_data.get("link_status", []) + interface_dict = {} + for item in mtu_list: + interface_dict.setdefault(item["interface"], {})["mtu"] = item["mtu"] + for item in type_list: + interface_type = map_interface_type(item["type"]) + interface_dict.setdefault(item["interface"], {})["type"] = interface_type + for item in ip_list: + interface_dict.setdefault(item["interface"], {})["ip_addresses"] = {"ip_address": item["ip_address"]} + for item in prefix_list: + interface_dict.setdefault(item["interface"], {}).setdefault("ip_addresses", {})["prefix_length"] = item[ + "prefix_length" + ] + for item in mac_list: + interface_dict.setdefault(item["interface"], {})["mac_address"] = item["mac_address"] + for item in description_list: + interface_dict.setdefault(item["interface"], {})["description"] = item["description"] + for item in 
link_status_list: + interface_dict.setdefault(item["interface"], {})["link_status"] = ( + True if item["link_status"] == "up" else False + ) + # Add missing keys with default values for David + for interface in interface_dict.values(): + interface.setdefault("802.1Q_mode", "") + interface.setdefault("lag", "") + interface.setdefault("untagged_vlan", {"name": "", "id": ""}) + interface.setdefault("tagged_vlans", [{"name": "", "id": ""}]) + + for interface, data in interface_dict.items(): + ip_addresses = data.get("ip_addresses", {}) + if ip_addresses: + data["ip_addresses"] = [ip_addresses] + + # Convert to nice list for David + interface_list = [] + for interface, data in interface_dict.items(): + interface_list.append({interface: data}) + + device_data["interfaces"] = interface_list + device_data["serial"] = serial + + del device_data["mtu"] + del device_data["type"] + del device_data["ip_addresses"] + del device_data["prefix_length"] + del device_data["mac_address"] + del device_data["description"] + del device_data["link_status"] + + return compiled_results From 8d153772c8719366e2106bd918b5979bff20bb56 Mon Sep 17 00:00:00 2001 From: Jeff Kala Date: Fri, 15 Mar 2024 16:56:18 -0500 Subject: [PATCH 158/225] fix junos mgmt intrface post processor --- nautobot_device_onboarding/command_mappers/juniper_junos.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nautobot_device_onboarding/command_mappers/juniper_junos.yml b/nautobot_device_onboarding/command_mappers/juniper_junos.yml index 9770c091..def14b65 100755 --- a/nautobot_device_onboarding/command_mappers/juniper_junos.yml +++ b/nautobot_device_onboarding/command_mappers/juniper_junos.yml @@ -20,7 +20,7 @@ device_onboarding: - command: "show interfaces terse | display json" use_textfsm: false jpath: '"interface-information"[]."physical-interface"[]."logical-interface"[].{name: name[].data, ip: "address-family"[]."interface-address"[]."ifa-local"[].data}' # yamllint disable-line 
rule:quoted-strings - post_processor: "{% for entry in obj %}{% if entry['ip'] is not none %}{% for ipaddr in entry['ip'] %}{% if original_host in ipaddr %}{{ entry['name'] | first }}{% endif %}{% endfor %}{% endif %}{% endfor %}" + post_processor: "{% for entry in obj %}{% if entry['ip'] %}{% for ipaddr in entry['ip'] %}{% if original_host in ipaddr %}{{ entry['name'] | first }}{% endif %}{% endfor %}{% endif %}{% endfor %}" mask_length: commands: # - command: "show route protocol direct | display json" From 7d6a12f80dab1e7ec13dd2fafb4a4ee40dc68651 Mon Sep 17 00:00:00 2001 From: Jeff Kala Date: Fri, 15 Mar 2024 17:12:27 -0500 Subject: [PATCH 159/225] fix junos --- nautobot_device_onboarding/command_mappers/juniper_junos.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nautobot_device_onboarding/command_mappers/juniper_junos.yml b/nautobot_device_onboarding/command_mappers/juniper_junos.yml index def14b65..ed42b9af 100755 --- a/nautobot_device_onboarding/command_mappers/juniper_junos.yml +++ b/nautobot_device_onboarding/command_mappers/juniper_junos.yml @@ -20,7 +20,7 @@ device_onboarding: - command: "show interfaces terse | display json" use_textfsm: false jpath: '"interface-information"[]."physical-interface"[]."logical-interface"[].{name: name[].data, ip: "address-family"[]."interface-address"[]."ifa-local"[].data}' # yamllint disable-line rule:quoted-strings - post_processor: "{% for entry in obj %}{% if entry['ip'] %}{% for ipaddr in entry['ip'] %}{% if original_host in ipaddr %}{{ entry['name'] | first }}{% endif %}{% endfor %}{% endif %}{% endfor %}" + post_processor: "{% for entry in obj %}{% if entry['ip'] %}{% for ipaddr in entry['ip'] %}{% if original_host in ipaddr %}{{ entry['name'] }}{% endif %}{% endfor %}{% endif %}{% endfor %}" mask_length: commands: # - command: "show route protocol direct | display json" From 477f40871593faf274e7be03487d02e7c9805a6c Mon Sep 17 00:00:00 2001 From: Jeff Kala 
<48843785+jeffkala@users.noreply.github.com> Date: Fri, 15 Mar 2024 23:19:28 -0600 Subject: [PATCH 160/225] Update juniper_junos.yml --- .../command_mappers/juniper_junos.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/nautobot_device_onboarding/command_mappers/juniper_junos.yml b/nautobot_device_onboarding/command_mappers/juniper_junos.yml index ed42b9af..725d6cf9 100755 --- a/nautobot_device_onboarding/command_mappers/juniper_junos.yml +++ b/nautobot_device_onboarding/command_mappers/juniper_junos.yml @@ -20,14 +20,14 @@ device_onboarding: - command: "show interfaces terse | display json" use_textfsm: false jpath: '"interface-information"[]."physical-interface"[]."logical-interface"[].{name: name[].data, ip: "address-family"[]."interface-address"[]."ifa-local"[].data}' # yamllint disable-line rule:quoted-strings - post_processor: "{% for entry in obj %}{% if entry['ip'] %}{% for ipaddr in entry['ip'] %}{% if original_host in ipaddr %}{{ entry['name'] }}{% endif %}{% endfor %}{% endif %}{% endfor %}" + post_processor: "{% for entry in obj %}{% if entry['ip'] %}{% for ipaddr in entry['ip'] %}{% if original_host in ipaddr %}{{ entry['name'] | first }}{% endif %}{% endfor %}{% endif %}{% endfor %}" mask_length: commands: # - command: "show route protocol direct | display json" # use_textfsm: false # jpath: ""route-information"[]."route-table"[]."rt"[]."rt-destination"[].data || [`31`]" # post_processor: "{% for ipaddr in obj %}{% if '/' in ipaddr and ipaddr.split('/')[0] == original_host %}{{ ipaddr.split('/')[1] }}{% endif %}{% endfor %}" - - command: "show interfaces terse | display json" + - command: "show route protocol direct | display json" use_textfsm: false jpath: '"route-information"[]."route-table"[]."rt"[]."rt-destination"[].data' # yamllint disable-line rule:quoted-strings - post_processor: "{% for ipaddr in obj %}{% if ipaddr.split('/')[0] == original_host %}{{ ipaddr.split('/')[1] }}{% endif %}{% endfor %}" + post_processor: 
"{% set mask = [] %}{% for ip_route in obj %}{% if ip_route | is_network %}{% if ip_route | ipaddress_network('version') == 4 %}{% if '139.65.209.82' | is_ip_within(ip_route) %}{% set _=mask.append(ip_route.split('/')[1]) %}{% endif %}{% endif %}{% endif %}{% endfor %}{{ mask | unique | first}}" From 8fc0e9d8a3475f5084199bf2cef2e7e05d4ae6a0 Mon Sep 17 00:00:00 2001 From: Jeff Kala <48843785+jeffkala@users.noreply.github.com> Date: Fri, 15 Mar 2024 23:26:28 -0600 Subject: [PATCH 161/225] Update juniper_junos.yml --- nautobot_device_onboarding/command_mappers/juniper_junos.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nautobot_device_onboarding/command_mappers/juniper_junos.yml b/nautobot_device_onboarding/command_mappers/juniper_junos.yml index 725d6cf9..bfdec521 100755 --- a/nautobot_device_onboarding/command_mappers/juniper_junos.yml +++ b/nautobot_device_onboarding/command_mappers/juniper_junos.yml @@ -30,4 +30,4 @@ device_onboarding: - command: "show route protocol direct | display json" use_textfsm: false jpath: '"route-information"[]."route-table"[]."rt"[]."rt-destination"[].data' # yamllint disable-line rule:quoted-strings - post_processor: "{% set mask = [] %}{% for ip_route in obj %}{% if ip_route | is_network %}{% if ip_route | ipaddress_network('version') == 4 %}{% if '139.65.209.82' | is_ip_within(ip_route) %}{% set _=mask.append(ip_route.split('/')[1]) %}{% endif %}{% endif %}{% endif %}{% endfor %}{{ mask | unique | first}}" + post_processor: "{% set mask = [] %}{% for ip_route in obj %}{% if ip_route | is_network %}{% if ip_route | ipaddress_network('version') == 4 %}{% if original_host | is_ip_within(ip_route) %}{% set _=mask.append(ip_route.split('/')[1]) %}{% endif %}{% endif %}{% endif %}{% endfor %}{{ mask | unique | first}}" From 14b3fc15f9f45268ae09725b94293eb6f0e4f813 Mon Sep 17 00:00:00 2001 From: Susan Hooks Date: Tue, 19 Mar 2024 21:17:22 +0000 Subject: [PATCH 162/225] update ni add nxos --- 
.../command_mappers/cisco_nxos.yml | 41 +++++++ nautobot_device_onboarding/constants.py | 3 + nautobot_device_onboarding/jobs.py | 1 - .../nornir_plays/command_getter.py | 8 +- .../nornir_plays/processor.py | 1 - nautobot_device_onboarding/utils/formatter.py | 112 ++++++++++++++++-- .../utils/jinja_filters.py | 11 +- 7 files changed, 160 insertions(+), 17 deletions(-) diff --git a/nautobot_device_onboarding/command_mappers/cisco_nxos.yml b/nautobot_device_onboarding/command_mappers/cisco_nxos.yml index 5bae0cfc..52dd7bff 100755 --- a/nautobot_device_onboarding/command_mappers/cisco_nxos.yml +++ b/nautobot_device_onboarding/command_mappers/cisco_nxos.yml @@ -25,3 +25,44 @@ device_onboarding: - command: "show interface" use_textfsm: true jpath: "[?ip_address=='{{ obj }}'].prefix_length || [`31`]" +network_importer: + type: + commands: + - command: "show interface" + use_textfsm: true + jpath: "[*].{interface: interface, type: hardware_type}" + ip_addresses: + commands: + - command: "show interface" + use_textfsm: true + jpath: "[*].{interface: interface, ip_address: ip_address}" + prefix_length: + commands: + - command: "show interface" + use_textfsm: true + jpath: "[*].{interface: interface, prefix_length: prefix_length}" + mtu: + commands: + - command: "show interface" + use_textfsm: true + jpath: "[*].{interface: interface, mtu: mtu}" + mac_address: + commands: + - command: "show interface" + use_textfsm: true + jpath: "[*].{interface: interface, mac_address: mac_address}" + description: + commands: + - command: "show interface" + use_textfsm: true + jpath: "[*].{interface: interface, description: description}" + link_status: + commands: + - command: "show interface" + use_textfsm: true + jpath: "[*].{interface: interface, link_status: link_status}" + mode: + commands: + - command: "show interface" + use_textfsm: true + jpath: "[*].{interface: interface, mode: mode}" \ No newline at end of file diff --git a/nautobot_device_onboarding/constants.py 
b/nautobot_device_onboarding/constants.py index 747468b2..185d45c8 100644 --- a/nautobot_device_onboarding/constants.py +++ b/nautobot_device_onboarding/constants.py @@ -15,4 +15,7 @@ "Forty Gigabit Ethernet": "40gbase-t", "Ethernet SVI": "virtual", "EthernetChannel": "lag", + "1000/10000 Ethernet": "1000base-t", + "Port-channel": "lag", + "EtherSVI": "virtual", } diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py index d92add9a..b9696b07 100755 --- a/nautobot_device_onboarding/jobs.py +++ b/nautobot_device_onboarding/jobs.py @@ -522,7 +522,6 @@ def __init__(self, *args, **kwargs): # RESTORE THESE LINES WHEN NOT TESTING! # self.command_getter_result = None # Dict result from CommandGetter job - self.logger.info(f"Command Getter Result: {self.command_getter_result}") self.devices_to_load = None # Queryset consisting of devices that responded # RESTORE THESE LINES WHEN NOT TESTING! # diff --git a/nautobot_device_onboarding/nornir_plays/command_getter.py b/nautobot_device_onboarding/nornir_plays/command_getter.py index 3244cb5e..e247c61c 100755 --- a/nautobot_device_onboarding/nornir_plays/command_getter.py +++ b/nautobot_device_onboarding/nornir_plays/command_getter.py @@ -19,7 +19,7 @@ from nautobot_device_onboarding.nornir_plays.processor import ProcessorDO from nautobot_device_onboarding.utils.helper import add_platform_parsing_info from nautobot_device_onboarding.utils.inventory_creator import _set_inventory -from nautobot_device_onboarding.utils.formatter import map_interface_type, format_ios_results +from nautobot_device_onboarding.utils.formatter import map_interface_type, format_results InventoryPluginRegister.register("nautobot-inventory", NautobotORMInventory) InventoryPluginRegister.register("empty-inventory", EmptyInventory) @@ -208,9 +208,7 @@ def command_getter_ni(job_result, log_level, kwargs): except Exception as err: # pylint: disable=broad-exception-caught logger.info("Error: %s", err) return err - - 
compiled_results = format_ios_results(compiled_results) - - return compiled_results + compiled_results = format_results(compiled_results) + return compiled_results diff --git a/nautobot_device_onboarding/nornir_plays/processor.py b/nautobot_device_onboarding/nornir_plays/processor.py index 1e03940e..8392b79b 100755 --- a/nautobot_device_onboarding/nornir_plays/processor.py +++ b/nautobot_device_onboarding/nornir_plays/processor.py @@ -77,7 +77,6 @@ def subtask_instance_completed(self, task: Task, host: Host, result: MultiResult # if not result.failed: formatted_data = extract_show_data(host, result, task.parent_task.params["command_getter_job"]) # revist should be able to just update self.data with full formatted_data - self.logger.info(f"Formatted Data: {formatted_data}", extra={"object": task.host}) for k, v in formatted_data.items(): self.data[host.name][k] = v diff --git a/nautobot_device_onboarding/utils/formatter.py b/nautobot_device_onboarding/utils/formatter.py index 37fcbe2e..196b3c3a 100755 --- a/nautobot_device_onboarding/utils/formatter.py +++ b/nautobot_device_onboarding/utils/formatter.py @@ -54,23 +54,29 @@ def perform_data_extraction(host, dict_field, command_info_dict, j2_env, task_re for show_command in command_info_dict["commands"]: if show_command["command"] == task_result.name: jpath_template = j2_env.from_string(show_command["jpath"]) - j2_rendered_jpath = jpath_template.render({"obj": host.name}) + j2_rendered_jpath = jpath_template.render({"obj": host.name, "original_host": host.name}) + print(j2_rendered_jpath) if not task_result.failed: if isinstance(task_result.result, str): try: result_to_json = json.loads(task_result.result) extracted_value = extract_data_from_json(result_to_json, j2_rendered_jpath) + print(f"extracted value: {extracted_value}") except json.decoder.JSONDecodeError: extracted_value = None else: extracted_value = extract_data_from_json(task_result.result, j2_rendered_jpath) + print(f"extracted value 2: {extracted_value}")
if show_command.get("post_processor"): template = j2_env.from_string(show_command["post_processor"]) extracted_processed = template.render({"obj": extracted_value, "original_host": host.name}) + print(f"extracted 1: {extracted_processed}") else: extracted_processed = extracted_value + print(f"extracted 2: {extracted_processed}") if isinstance(extracted_value, list) and len(extracted_value) == 1: extracted_processed = extracted_value[0] + print(f"extracted 3: {extracted_processed}") if command_info_dict.get("validator_pattern"): # temp validator if command_info_dict["validator_pattern"] == "not None": @@ -117,15 +123,71 @@ def map_interface_type(interface_type): """Map interface type to a Nautobot type.""" return INTERFACE_TYPE_MAP_STATIC.get(interface_type, "other") + def format_ios_results(compiled_results): - """Format the results of the show commands for IOS devices. + "Format the results of the show commands for IOS devices." + for device, device_data in compiled_results.items(): + serial = Device.objects.get(name=device).serial + mtu_list = device_data.get("mtu", []) + type_list = device_data.get("type", []) + ip_list = device_data.get("ip_addresses", []) + prefix_list = device_data.get("prefix_length", []) + mac_list = device_data.get("mac_address", []) + description_list = device_data.get("description", []) + link_status_list = device_data.get("link_status", []) - Args: - compiled_results (dict): The compiled results from the Nornir task. 
+ interface_dict = {} + for item in mtu_list: + interface_dict.setdefault(item["interface"], {})["mtu"] = item["mtu"] + for item in type_list: + interface_type = map_interface_type(item["type"]) + interface_dict.setdefault(item["interface"], {})["type"] = interface_type + for item in ip_list: + interface_dict.setdefault(item["interface"], {})["ip_addresses"] = {"ip_address": item["ip_address"]} + for item in prefix_list: + interface_dict.setdefault(item["interface"], {}).setdefault("ip_addresses", {})["prefix_length"] = item[ + "prefix_length" + ] + for item in mac_list: + interface_dict.setdefault(item["interface"], {})["mac_address"] = item["mac_address"] + for item in description_list: + interface_dict.setdefault(item["interface"], {})["description"] = item["description"] + for item in link_status_list: + interface_dict.setdefault(item["interface"], {})["link_status"] = ( + True if item["link_status"] == "up" else False + ) - Returns: - dict: The formatted results. - """ + for interface in interface_dict.values(): + interface.setdefault("802.1Q_mode", "") + interface.setdefault("lag", "") + interface.setdefault("untagged_vlan", {"name": "", "id": ""}) + interface.setdefault("tagged_vlans", [{"name": "", "id": ""}]) + + for interface, data in interface_dict.items(): + ip_addresses = data.get("ip_addresses", {}) + if ip_addresses: + data["ip_addresses"] = [ip_addresses] + + interface_list = [] + for interface, data in interface_dict.items(): + interface_list.append({interface: data}) + + device_data["interfaces"] = interface_list + device_data["serial"] = serial + + del device_data["mtu"] + del device_data["type"] + del device_data["ip_addresses"] + del device_data["prefix_length"] + del device_data["mac_address"] + del device_data["description"] + del device_data["link_status"] + + return compiled_results + + +def format_nxos_results(compiled_results): + "Format the results of the show commands for NX-OS devices." 
for device, device_data in compiled_results.items(): serial = Device.objects.get(name=device).serial mtu_list = device_data.get("mtu", []) @@ -135,6 +197,7 @@ def format_ios_results(compiled_results): mac_list = device_data.get("mac_address", []) description_list = device_data.get("description", []) link_status_list = device_data.get("link_status", []) + mode_list = device_data.get("mode", []) interface_dict = {} for item in mtu_list: interface_dict.setdefault(item["interface"], {})["mtu"] = item["mtu"] @@ -155,7 +218,11 @@ def format_ios_results(compiled_results): interface_dict.setdefault(item["interface"], {})["link_status"] = ( True if item["link_status"] == "up" else False ) - # Add missing keys with default values for David + for item in mode_list: + interface_dict.setdefault(item["interface"], {})["802.1Q_mode"] = ( + "access" if item["mode"] == "access" else "tagged" if item["mode"] == "trunk" else "" + ) + for interface in interface_dict.values(): interface.setdefault("802.1Q_mode", "") interface.setdefault("lag", "") @@ -167,7 +234,6 @@ def format_ios_results(compiled_results): if ip_addresses: data["ip_addresses"] = [ip_addresses] - # Convert to nice list for David interface_list = [] for interface, data in interface_dict.items(): interface_list.append({interface: data}) @@ -182,5 +248,33 @@ def format_ios_results(compiled_results): del device_data["mac_address"] del device_data["description"] del device_data["link_status"] + del device_data["mode"] + + return compiled_results + + +def format_junos_results(compiled_results): + pass + + +def format_results(compiled_results): + """Format the results of the show commands for IOS devices. + + Args: + compiled_results (dict): The compiled results from the Nornir task. + + Returns: + dict: The formatted results. 
+ """ + for device in compiled_results: + platform = compiled_results[device]["platform"] + if platform in ["cisco_ios", "cisco_xe"]: + format_ios_results(compiled_results) + elif platform == "cisco_nxos": + format_nxos_results(compiled_results) + elif platform == "juniper_junos": + format_junos_results(compiled_results) + else: + raise ValueError(f"Unsupported platform {platform}") return compiled_results diff --git a/nautobot_device_onboarding/utils/jinja_filters.py b/nautobot_device_onboarding/utils/jinja_filters.py index d9887424..1f82cd78 100755 --- a/nautobot_device_onboarding/utils/jinja_filters.py +++ b/nautobot_device_onboarding/utils/jinja_filters.py @@ -1,16 +1,24 @@ """Filters for Jinja2 PostProcessing.""" +from nautobot_device_onboarding.constants import INTERFACE_TYPE_MAP_STATIC + + +def map_interface_type(interface_type): + """Map interface type to a Nautobot type.""" + return INTERFACE_TYPE_MAP_STATIC.get(interface_type, "other") + def fix_interfaces(interfaces): """Prep interface formatting for SSoT.""" for interface in interfaces: for _, int_values in interface.items(): - int_values["type"] = "other" + int_values["type"] = map_interface_type(int_values.get("hardware_type", "")) int_values["802.1Q_mode"] = "" int_values["untagged_vlan"] = "" int_values["tagged_vlans"] = [] int_values["lag"] = "" int_values["ip_addresses"] = [] + int_values["mtu"] = "" int_values["ip_addresses"].append( {"ip_address": int_values.get("ip_address", ""), "prefix_length": int_values.get("prefix_length", "")} ) @@ -18,4 +26,5 @@ def fix_interfaces(interfaces): int_values["link_status"] = True else: int_values["link_status"] = False + return interfaces From ed1a1916eb71832294429500debb53942c726f61 Mon Sep 17 00:00:00 2001 From: Susan Hooks Date: Wed, 20 Mar 2024 20:17:40 +0000 Subject: [PATCH 163/225] update vlans --- nautobot_device_onboarding/utils/formatter.py | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git 
a/nautobot_device_onboarding/utils/formatter.py b/nautobot_device_onboarding/utils/formatter.py index 196b3c3a..620fc3d8 100755 --- a/nautobot_device_onboarding/utils/formatter.py +++ b/nautobot_device_onboarding/utils/formatter.py @@ -160,8 +160,8 @@ def format_ios_results(compiled_results): for interface in interface_dict.values(): interface.setdefault("802.1Q_mode", "") interface.setdefault("lag", "") - interface.setdefault("untagged_vlan", {"name": "", "id": ""}) - interface.setdefault("tagged_vlans", [{"name": "", "id": ""}]) + interface.setdefault("untagged_vlan", {}) + interface.setdefault("tagged_vlans", []) for interface, data in interface_dict.items(): ip_addresses = data.get("ip_addresses", {}) @@ -226,8 +226,8 @@ def format_nxos_results(compiled_results): for interface in interface_dict.values(): interface.setdefault("802.1Q_mode", "") interface.setdefault("lag", "") - interface.setdefault("untagged_vlan", {"name": "", "id": ""}) - interface.setdefault("tagged_vlans", [{"name": "", "id": ""}]) + interface.setdefault("untagged_vlan", {}) + interface.setdefault("tagged_vlans", []) for interface, data in interface_dict.items(): ip_addresses = data.get("ip_addresses", {}) @@ -254,8 +254,7 @@ def format_nxos_results(compiled_results): def format_junos_results(compiled_results): - pass - + return compiled_results def format_results(compiled_results): """Format the results of the show commands for IOS devices. 
From 19e3163b1264c16dc75081726ebac9838e14f849 Mon Sep 17 00:00:00 2001 From: David Cates Date: Wed, 20 Mar 2024 16:18:08 -0700 Subject: [PATCH 164/225] update NI adapters and models, add untagged vlan model --- .../adapters/network_importer_adapters.py | 76 ++++++- .../diffsync/mock_data.py | 14 +- .../models/network_importer_models.py | 197 ++++++++++++++---- nautobot_device_onboarding/jobs.py | 5 +- .../nornir_plays/command_getter.py | 4 +- nautobot_device_onboarding/utils/formatter.py | 9 +- pyproject.toml | 2 +- 7 files changed, 237 insertions(+), 70 deletions(-) diff --git a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py index eef0b7fe..44bf8d4d 100644 --- a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py @@ -1,7 +1,5 @@ """DiffSync adapters.""" -import json - import diffsync from diffsync.enum import DiffSyncModelFlags from django.core.exceptions import ValidationError @@ -38,6 +36,7 @@ class NetworkImporterNautobotAdapter(FilteredNautobotAdapter): ipaddress_to_interface = network_importer_models.NetworkImporterIPAddressToInterface vlan = network_importer_models.NetworkImporterVLAN tagged_vlans_to_interface = network_importer_models.NetworkImporterTaggedVlansToInterface + untagged_vlan_to_interface = network_importer_models.NetworkImporterUnTaggedVlanToInterface lag_to_interface = network_importer_models.NetworkImporterLagToInterface top_level = [ @@ -45,6 +44,7 @@ class NetworkImporterNautobotAdapter(FilteredNautobotAdapter): "vlan", "device", "ipaddress_to_interface", + "untagged_vlan_to_interface", "tagged_vlans_to_interface", "lag_to_interface", ] @@ -55,6 +55,14 @@ def load_param_mac_address(self, parameter_name, database_object): self.job.logger.debug(f"Converting {parameter_name}: {database_object.mac_address}") return 
str(database_object.mac_address) + def load_param_untagged_vlan__name(self, parameter_name, database_object): + """Load, or prevent loading, untagged vlans depending on form selection.""" + if not self.job.sync_vlans: + if self.job.debug: + self.job.logger.debug(f"{parameter_name} will not be synced to {database_object}") + return "" + return str(database_object.untagged_vlan.name) + def load_ip_addresses(self): """Load IP addresses into the DiffSync store. @@ -67,7 +75,8 @@ def load_ip_addresses(self): for ip_address in interface_data["ip_addresses"]: if ip_address: ip_address_hosts.add(ip_address["ip_address"]) - ip_address_hosts.remove("") # do not attempt to filter ip addresses with empty strings + if "" in ip_address_hosts: + ip_address_hosts.remove("") # do not attempt to filter ip addresses with empty strings for ip_address in IPAddress.objects.filter( host__in=ip_address_hosts, parent__namespace__name=self.job.namespace.name, @@ -133,7 +142,29 @@ def load_tagged_vlans_to_interface(self): network_tagged_vlans_to_interface.model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST self.add(network_tagged_vlans_to_interface) if self.job.debug: - self.job.logger.debug(f"Vlan to interface: {network_tagged_vlans_to_interface} loaded.") + self.job.logger.debug(f"Tagged Vlan to interface: {network_tagged_vlans_to_interface} loaded.") + + def load_untagged_vlan_to_interface(self): + """Load a model representing untagged vlan assignments to the Diffsync store. + + Only Vlan assignments that were returned by the CommandGetter job should be loaded. 
+ """ + for interface in Interface.objects.filter(device__in=self.job.devices_to_load): + untagged_vlan = {} + if interface.untagged_vlan: + untagged_vlan["name"] = interface.untagged_vlan.name + untagged_vlan["id"] = str(interface.untagged_vlan.vid) + + network_untagged_vlan_to_interface = self.untagged_vlan_to_interface( + diffsync=self, + device__name=interface.device.name, + name=interface.name, + untagged_vlan=untagged_vlan, + ) + network_untagged_vlan_to_interface.model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST + self.add(network_untagged_vlan_to_interface) + if self.job.debug: + self.job.logger.debug(f"Untagged Vlan to interface: {network_untagged_vlan_to_interface} loaded.") def load_lag_to_interface(self): """ @@ -167,6 +198,9 @@ def load(self): elif model_name == "tagged_vlans_to_interface": if self.job.sync_vlans: self.load_tagged_vlans_to_interface() + elif model_name == "untagged_vlan_to_interface": + if self.job.sync_vlans: + self.load_untagged_vlan_to_interface() elif model_name == "lag_to_interface": self.load_lag_to_interface() else: @@ -195,6 +229,7 @@ def __init__(self, *args, job, sync=None, **kwargs): ipaddress_to_interface = network_importer_models.NetworkImporterIPAddressToInterface vlan = network_importer_models.NetworkImporterVLAN tagged_vlans_to_interface = network_importer_models.NetworkImporterTaggedVlansToInterface + untagged_vlan_to_interface = network_importer_models.NetworkImporterUnTaggedVlanToInterface lag_to_interface = network_importer_models.NetworkImporterLagToInterface top_level = [ @@ -202,6 +237,7 @@ "vlan", "device", "ipaddress_to_interface", + "untagged_vlan_to_interface", "tagged_vlans_to_interface", "lag_to_interface", ] @@ -244,8 +280,7 @@ def execute_command_getter(self): self._handle_failed_devices(device_data=result) else: self.job.logger.error( - "Data returned from CommandGetter is not the correct type. 
" - "No devices will be onboarded, check the CommandGetter job logs." + "Data returned from CommandGetter is not the correct type. " "No devices will be onboarded" ) raise ValidationError("Unexpected data returned from CommandGetter.") @@ -253,7 +288,7 @@ def _process_mac_address(self, mac_address): """Convert a mac address to match the value stored by Nautobot.""" if mac_address: return str(EUI(mac_address, version=48, dialect=MacUnixExpandedUppercase)) - return "" + return "None" def load_devices(self): """Load devices into the DiffSync store.""" @@ -269,6 +304,13 @@ def load_devices(self): if self.job.debug: self.job.logger.debug(f"Interface {network_interface} loaded.") + def _get_vlan_name(self, interface_data): + """Given interface data returned from a device, process and return the vlan name.""" + vlan_name = "" + if self.job.sync_vlans: + vlan_name = interface_data["untagged_vlan"]["name"] if interface_data["untagged_vlan"] else "" + return vlan_name + def load_interface(self, hostname, interface_name, interface_data): """Load an interface into the DiffSync store.""" network_interface = self.interface( @@ -282,7 +324,7 @@ def load_interface(self, hostname, interface_name, interface_data): description=interface_data["description"], enabled=interface_data["link_status"], mode=interface_data["802.1Q_mode"], - untagged_vlan__name=interface_data["untagged_vlan"]["name"] if interface_data["untagged_vlan"] else None, + untagged_vlan__name=self._get_vlan_name(interface_data=interface_data), ) self.add(network_interface) if self.job.debug: @@ -391,6 +433,23 @@ def load_tagged_vlans_to_interface(self): if self.job.debug: self.job.logger.debug(f"Tagged Vlan to interface {network_tagged_vlans_to_interface} loaded.") + def load_untagged_vlan_to_interface(self): + """Load untagged vlan to interface assignments into the Diffsync store.""" + for hostname, device_data in self.job.command_getter_result.items(): + for interface in device_data["interfaces"]: + for
interface_name, interface_data in interface.items(): + network_untagged_vlan_to_interface = self.untagged_vlan_to_interface( + diffsync=self, + device__name=hostname, + name=interface_name, + untagged_vlan=interface_data["untagged_vlan"], + ) + self.add(network_untagged_vlan_to_interface) + if self.job.debug: + self.job.logger.debug( + f"Untagged Vlan to interface {network_untagged_vlan_to_interface} loaded." + ) + def load_lag_to_interface(self): """Load lag interface assignments into the Diffsync store.""" for hostname, device_data in self.job.command_getter_result.items(): @@ -416,4 +475,5 @@ def load(self): self.load_ip_address_to_interfaces() if self.job.sync_vlans: self.load_tagged_vlans_to_interface() + self.load_untagged_vlan_to_interface() self.load_lag_to_interface() diff --git a/nautobot_device_onboarding/diffsync/mock_data.py b/nautobot_device_onboarding/diffsync/mock_data.py index c8b41560..b337c92e 100644 --- a/nautobot_device_onboarding/diffsync/mock_data.py +++ b/nautobot_device_onboarding/diffsync/mock_data.py @@ -34,7 +34,7 @@ "link_status": True, "802.1Q_mode": "", "lag": "Po2", - "untagged_vlan": "", + "untagged_vlan": {}, "tagged_vlans": [], } }, @@ -51,7 +51,7 @@ "link_status": True, "802.1Q_mode": "tagged", "lag": "Po1", - "untagged_vlan": "", + "untagged_vlan": {}, "tagged_vlans": [{"name": "vlan40", "id": "40"}, {"name": "vlan50", "id": "50"}], } }, @@ -67,7 +67,7 @@ "link_status": True, "802.1Q_mode": "", "lag": "", - "untagged_vlan": "", + "untagged_vlan": {}, "tagged_vlans": [], } }, @@ -81,7 +81,7 @@ "link_status": True, "802.1Q_mode": "", "lag": "", - "untagged_vlan": "", + "untagged_vlan": {}, "tagged_vlans": [], } }, @@ -132,7 +132,7 @@ "link_status": True, "802.1Q_mode": "", "lag": "Po1", - "untagged_vlan": "", + "untagged_vlan": {}, "tagged_vlans": [], } }, @@ -149,7 +149,7 @@ "link_status": True, "802.1Q_mode": "tagged", "lag": "Po1", - "untagged_vlan": "", + "untagged_vlan": {}, "tagged_vlans": [{"name": "vlan40", "id": "40"}, 
{"name": "vlan50", "id": "50"}], } }, @@ -163,7 +163,7 @@ "link_status": True, "802.1Q_mode": "", "lag": "", - "untagged_vlan": "", + "untagged_vlan": {}, "tagged_vlans": [], } }, diff --git a/nautobot_device_onboarding/diffsync/models/network_importer_models.py b/nautobot_device_onboarding/diffsync/models/network_importer_models.py index 169afa03..0609c400 100644 --- a/nautobot_device_onboarding/diffsync/models/network_importer_models.py +++ b/nautobot_device_onboarding/diffsync/models/network_importer_models.py @@ -3,6 +3,7 @@ from typing import List, Optional from diffsync import DiffSync, DiffSyncModel +from diffsync import exceptions as diffsync_exceptions from django.core.exceptions import MultipleObjectsReturned, ObjectDoesNotExist, ValidationError from nautobot.dcim.choices import InterfaceTypeChoices from nautobot.dcim.models import Device, Interface, Location @@ -100,7 +101,6 @@ class NetworkImporterInterface(FilteredNautobotModel): "mtu", # "parent_interface__name", "mode", - "untagged_vlan__name", "enabled", "description", ) @@ -115,7 +115,6 @@ class NetworkImporterInterface(FilteredNautobotModel): parent_interface__name: Optional[str] lag__name: Optional[str] mode: Optional[str] - untagged_vlan__name: Optional[str] enabled: Optional[bool] description: Optional[str] @@ -136,7 +135,7 @@ class NetworkImporterIPAddress(DiffSyncModel): @classmethod def create(cls, diffsync, ids, attrs): - """Create a new IPAddressToInterface object.""" + """Create a new IPAddress object.""" diffsync_utils.get_or_create_ip_address( host=ids["host"], mask_length=attrs["mask_length"], @@ -148,7 +147,7 @@ def create(cls, diffsync, ids, attrs): return super().create(diffsync, ids, attrs) def update(self, attrs): - """Update an existing IPAddressToInterface object.""" + """Update an existing IPAddress object.""" try: ip_address = IPAddress.objects.get(host=self.host, parent__namespace=self.diffsync.job.namespace) except ObjectDoesNotExist as err: @@ -241,12 +240,9 @@ class 
NetworkImporterTaggedVlansToInterface(DiffSyncModel): tagged_vlans: Optional[list] - # TODO: move the create and update method logic to a single utility function @classmethod - def create(cls, diffsync, ids, attrs): - """Assign tagged vlans to an interface.""" - interface = Interface.objects.get(device__name=ids["device__name"], name=ids["name"]) - + def _get_and_assign_tagged_vlans(cls, diffsync, attrs, interface): + """Loop through the tagged vlans for an interface and assign them.""" for network_vlan in attrs["tagged_vlans"]: try: nautobot_vlan = VLAN.objects.get( @@ -259,38 +255,129 @@ def create(cls, diffsync, ids, attrs): f"with attributes [name: {network_vlan['name']}, vid: {network_vlan['id']} " f"location: {interface.device.location}]" ) - try: - interface.validated_save() - except ValidationError as err: - diffsync.job.logger.error( - f"Failed to assign tagged vlans {attrs['tagged_vlans']} to {interface} on {interface.device}, {err}" - ) + raise diffsync_exceptions.ObjectNotCreated + + @classmethod + def create(cls, diffsync, ids, attrs): + """Assign tagged vlans to an interface.""" + if attrs.get("tagged_vlans"): + try: + interface = Interface.objects.get(device__name=ids["device__name"], name=ids["name"]) + except ObjectDoesNotExist: + diffsync.job.logger.error( + f"Failed to assign tagged vlans {attrs['tagged_vlans']}. An interface with " + f"attributes: [device__name: {ids['device__name']} name: {ids['name']}] was not found." 
+ ) + raise diffsync_exceptions.ObjectNotCreated + cls._get_and_assign_tagged_vlans(diffsync, attrs, interface) + if interface: + try: + interface.validated_save() + except ValidationError as err: + diffsync.job.logger.error( + f"Failed to assign tagged vlans {attrs['tagged_vlans']} to {interface} on {interface.device}, {err}" + ) + raise diffsync_exceptions.ObjectNotCreated return super().create(diffsync, ids, attrs) def update(self, attrs): """Update tagged vlans.""" - interface = Interface.objects.get(**self.get_identifiers()) - interface.tagged_vlans.clear() - - for network_vlan in attrs["tagged_vlans"]: + if attrs.get("tagged_vlans"): try: - nautobot_vlan = VLAN.objects.get( - name=network_vlan["name"], vid=network_vlan["id"], location=interface.device.location - ) - interface.tagged_vlans.add(nautobot_vlan) + interface = Interface.objects.get(**self.get_identifiers()) + interface.tagged_vlans.clear() except ObjectDoesNotExist: self.diffsync.job.logger.error( - f"Failed to assign tagged vlan to {interface}, unable to locate a vlan " - f"with attributes [name: {network_vlan['name']}, vid: {network_vlan['id']} " - f"location: {interface.device.location}]" + f"Failed to assign tagged vlans {attrs['tagged_vlans']}. An interface with " + f"attributes: [{self.get_identifiers}] was not found." 
) + raise diffsync_exceptions.ObjectNotUpdated + self._get_and_assign_tagged_vlans(self.diffsync, attrs, interface) + try: + interface.validated_save() + except ValidationError as err: + self.diffsync.job.logger.error( + f"Failed to assign tagged vlans {attrs['tagged_vlans']} to {interface} on {interface.device}, {err}" + ) + raise diffsync_exceptions.ObjectNotUpdated + return super().update(attrs) + + +class NetworkImporterUnTaggedVlanToInterface(DiffSyncModel): + """Shared data model representing a UnTaggedVlanToInterface.""" + + _modelname = "untagged_vlan_to_interface" + _identifiers = ("device__name", "name") + _attributes = ("untagged_vlan",) + + device__name: str + name: str + + untagged_vlan: Optional[dict] + + @classmethod + def _get_and_assign_untagged_vlan(cls, diffsync, attrs, interface): + """Assign an untagged vlan to an interface.""" try: - interface.validated_save() - except ValidationError as err: - self.diffsync.job.logger.error( - f"Failed to assign tagged vlans {attrs['tagged_vlans']} to {interface} on {interface.device}, {err}" + vlan = VLAN.objects.get( + name=attrs["untagged_vlan"]["name"], + vid=attrs["untagged_vlan"]["id"], + location=interface.device.location, ) + interface.untagged_vlan = vlan + except ObjectDoesNotExist: + diffsync.job.logger.error( + f"Failed to assign untagged vlan to {interface}, unable to locate a vlan with " + f"attributes [name: {attrs['untagged_vlan']['name']}, vid: {attrs['untagged_vlan']['id']} " + f"location: {interface.device.location}]" + ) + raise diffsync_exceptions.ObjectNotCreated + + @classmethod + def create(cls, diffsync, ids, attrs): + """Assign an untagged vlan to an interface.""" + if attrs.get("untagged_vlan"): + try: + interface = Interface.objects.get(device__name=ids["device__name"], name=ids["name"]) + except ObjectDoesNotExist: + diffsync.job.logger.error( + f"Failed to assign untagged vlan {attrs['untagged_vlan']}. 
An interface with " + f"attributes: [device__name: {ids['device__name']} name: {ids['name']}] was not found." + ) + raise diffsync_exceptions.ObjectNotCreated + if attrs.get("untagged_vlan"): + cls._get_and_assign_untagged_vlan(diffsync, attrs, interface) + if interface: + try: + interface.validated_save() + except ValidationError as err: + diffsync.job.logger.error( + f"Failed to assign untagged vlan {attrs['untagged_vlan']} to {interface} on {interface.device}, {err}" + ) + raise diffsync_exceptions.ObjectNotCreated + return super().create(diffsync, ids, attrs) + def update(self, attrs): + """Update the untagged vlan on an interface.""" + if attrs.get("untagged_vlan"): + try: + interface = Interface.objects.get(**self.get_identifiers()) + except ObjectDoesNotExist: + self.diffsync.job.logger.error( + f"Failed to assign untagged vlan {attrs['untagged_vlan']}. An interface with " + f"attributes: [{self.get_identifiers}] was not found." + ) + raise diffsync_exceptions.ObjectNotUpdated + if attrs.get("untagged_vlan"): + self._get_and_assign_untagged_vlan(self.diffsync, attrs, interface) + if interface: + try: + interface.validated_save() + except ValidationError as err: + self.diffsync.job.logger.error( + f"Failed to assign untagged vlans {attrs['untagged_vlan']} to {interface} on {interface.device}, {err}" + ) + raise diffsync_exceptions.ObjectNotUpdated return super().update(attrs) @@ -310,29 +397,47 @@ class NetworkImporterLagToInterface(DiffSyncModel): @classmethod def create(cls, diffsync, ids, attrs): """Assign a lag to an interface.""" - if attrs["lag__interface__name"]: # Prevent the sync from attempting to assign lag interface names of 'None' - interface = Interface.objects.get(device__name=ids["device__name"], name=ids["name"]) + if attrs["lag__interface__name"]: try: - lag_interface = Interface.objects.get( - name=attrs["lag__interface__name"], device=interface.device, type=InterfaceTypeChoices.TYPE_LAG - ) - interface.lag = lag_interface - 
interface.validated_save() + interface = Interface.objects.get(device__name=ids["device__name"], name=ids["name"]) except ObjectDoesNotExist: diffsync.job.logger.error( - f"Failed to assign lag to {interface}, unable to locate a lag interface " - f"with attributes [name: {attrs['lag__interface__name']}, device: {interface.device.name} " - f"type: {InterfaceTypeChoices.TYPE_LAG}]" - ) - except ValidationError as err: - diffsync.job.logger.error( - f"Failed to assign lag {lag_interface} to {interface} on {interface.device}, {err}" + f"Failed to assign lag {attrs['lag__interface__name']}. An interface with " + f"attributes: [device__name: {ids['device__name']} name: {ids['name']}] was not found." ) + raise diffsync_exceptions.ObjectNotCreated + if interface: + try: + lag_interface = Interface.objects.get( + name=attrs["lag__interface__name"], device=interface.device, type=InterfaceTypeChoices.TYPE_LAG + ) + interface.lag = lag_interface + interface.validated_save() + except ObjectDoesNotExist: + diffsync.job.logger.error( + f"Failed to assign lag to {interface}, unable to locate a lag interface " + f"with attributes [name: {attrs['lag__interface__name']}, device: {interface.device.name} " + f"type: {InterfaceTypeChoices.TYPE_LAG}]" + ) + raise diffsync_exceptions.ObjectNotCreated + except ValidationError as err: + diffsync.job.logger.error( + f"Failed to assign lag {lag_interface} to {interface} on {interface.device}, {err}" + ) + raise diffsync_exceptions.ObjectNotCreated return super().create(diffsync, ids, attrs) def update(self, attrs): """Update and interface lag.""" - interface = Interface.objects.get(**self.get_identifiers()) + if attrs.get("lag__interface__name"): + try: + interface = Interface.objects.get(**self.get_identifiers()) + except ObjectDoesNotExist: + self.diffsync.job.logger.error( + f"Failed to assign untagged lag {attrs['lag__interface__name']}. " + f"An interface with attributes: [{self.get_identifiers}] was not found." 
+ ) + raise diffsync_exceptions.ObjectNotUpdated try: lag_interface = Interface.objects.get( name=attrs["lag__interface__name"], device=interface.device, type=InterfaceTypeChoices.TYPE_LAG @@ -345,10 +450,12 @@ def update(self, attrs): f"with attributes [name: {attrs['lag__interface__name']}, device: {interface.device.name} " f"type: {InterfaceTypeChoices.TYPE_LAG}]" ) + raise diffsync_exceptions.ObjectNotUpdated except ValidationError as err: self.diffsync.job.logger.error( f"Failed to assign lag {lag_interface} to {interface} on {interface.device}, {err}" ) + raise diffsync_exceptions.ObjectNotUpdated return super().update(attrs) diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py index b9696b07..54afe120 100755 --- a/nautobot_device_onboarding/jobs.py +++ b/nautobot_device_onboarding/jobs.py @@ -24,12 +24,9 @@ OnboardingNautobotAdapter, OnboardingNetworkAdapter, ) - - from nautobot_device_onboarding.exceptions import OnboardException from nautobot_device_onboarding.helpers import onboarding_task_fqdn_to_ip from nautobot_device_onboarding.netdev_keeper import NetdevKeeper -from nautobot_device_onboarding.utils.formatter import map_interface_type from nautobot_device_onboarding.nornir_plays.command_getter import command_getter_do, command_getter_ni PLUGIN_SETTINGS = settings.PLUGINS_CONFIG["nautobot_device_onboarding"] @@ -517,7 +514,7 @@ def __init__(self, *args, **kwargs): # from nautobot_device_onboarding.diffsync import mock_data # from nautobot_device_onboarding.utils import diffsync_utils # self.command_getter_result = mock_data.network_importer_mock_data - # self.devices_to_load = diffsync_utils.generate_device_queryset_from_command_getter_result(mock_data.network_importer_mock_data) + # self.devices_to_load = diffsync_utils.generate_device_queryset_from_command_getter_result(self.command_getter_result) # FOR TESTING ONLY, REMOVE WHEN NOT TESTING # RESTORE THESE LINES WHEN NOT TESTING! 
# diff --git a/nautobot_device_onboarding/nornir_plays/command_getter.py b/nautobot_device_onboarding/nornir_plays/command_getter.py index e247c61c..f4653a25 100755 --- a/nautobot_device_onboarding/nornir_plays/command_getter.py +++ b/nautobot_device_onboarding/nornir_plays/command_getter.py @@ -2,7 +2,7 @@ # pylint: disable=relative-beyond-top-level from django.conf import settings -from nautobot.dcim.models import Platform, Device +from nautobot.dcim.models import Platform from nautobot.extras.choices import SecretsGroupAccessTypeChoices, SecretsGroupSecretTypeChoices from nautobot.extras.models import SecretsGroup from nautobot_plugin_nornir.constants import NORNIR_SETTINGS @@ -17,9 +17,9 @@ from nautobot_device_onboarding.nornir_plays.empty_inventory import EmptyInventory from nautobot_device_onboarding.nornir_plays.logger import NornirLogger from nautobot_device_onboarding.nornir_plays.processor import ProcessorDO +from nautobot_device_onboarding.utils.formatter import format_results from nautobot_device_onboarding.utils.helper import add_platform_parsing_info from nautobot_device_onboarding.utils.inventory_creator import _set_inventory -from nautobot_device_onboarding.utils.formatter import map_interface_type, format_results InventoryPluginRegister.register("nautobot-inventory", NautobotORMInventory) InventoryPluginRegister.register("empty-inventory", EmptyInventory) diff --git a/nautobot_device_onboarding/utils/formatter.py b/nautobot_device_onboarding/utils/formatter.py index 620fc3d8..22f29f57 100755 --- a/nautobot_device_onboarding/utils/formatter.py +++ b/nautobot_device_onboarding/utils/formatter.py @@ -8,9 +8,10 @@ from django.utils.module_loading import import_string from jdiff import extract_data_from_json from jinja2.sandbox import SandboxedEnvironment +from nautobot.dcim.models import Device + from nautobot_device_onboarding.constants import INTERFACE_TYPE_MAP_STATIC from nautobot_device_onboarding.utils.jinja_filters import fix_interfaces -from 
nautobot.dcim.models import Device
 
 DATA_DIR = os.path.abspath(os.path.join(os.path.dirname(os.path.dirname(__file__)), "command_mappers"))
 
@@ -125,7 +126,7 @@ def map_interface_type(interface_type):
 
 
 def format_ios_results(compiled_results):
-    "Format the results of the show commands for IOS devices."
+    """Format the results of the show commands for IOS devices."""
     for device, device_data in compiled_results.items():
         serial = Device.objects.get(name=device).serial
         mtu_list = device_data.get("mtu", [])
@@ -187,7 +188,7 @@ def format_ios_results(compiled_results):
 
 
 def format_nxos_results(compiled_results):
-    "Format the results of the show commands for NX-OS devices."
+    """Format the results of the show commands for NX-OS devices."""
     for device, device_data in compiled_results.items():
         serial = Device.objects.get(name=device).serial
         mtu_list = device_data.get("mtu", [])
@@ -254,8 +255,10 @@ def format_nxos_results(compiled_results):
 
 
 def format_junos_results(compiled_results):
+    """Format the results of the show commands for Junos devices."""
     return compiled_results
 
+
 def format_results(compiled_results):
     """Format the results of the show commands for IOS devices.
 
diff --git a/pyproject.toml b/pyproject.toml index bca6eff6..1ae424e4 100755 --- a/pyproject.toml +++ b/pyproject.toml @@ -30,7 +30,7 @@ python = ">=3.8,<3.12" napalm = ">=2.5.0, <5" zipp = "^3.4.0" nautobot = "^2.1.1" -nautobot-ssot = "^2.2.0" +nautobot-ssot = "^2.5.0" nautobot-plugin-nornir = "2.0.0" jdiff = "^0.0.6" ntc-templates = "^4.3.0" From 7ffe1207c8c3e19ded28bf0301afb0bfc8ce5bf2 Mon Sep 17 00:00:00 2001 From: Susan Hooks Date: Fri, 22 Mar 2024 16:24:42 +0000 Subject: [PATCH 165/225] bump --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 1ae424e4..d98d8826 100755 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "nautobot-device-onboarding" -version = "3.0.2a7" +version = "3.0.2a8" description = "A app for Nautobot to easily onboard new devices." authors = ["Network to Code, LLC "] license = "Apache-2.0" From fe47e908dcc78908267c086f9f3db0b077390c9a Mon Sep 17 00:00:00 2001 From: Susan Hooks Date: Fri, 22 Mar 2024 16:42:43 +0000 Subject: [PATCH 166/225] linting --- nautobot_device_onboarding/command_mappers/cisco_nxos.yml | 2 +- .../diffsync/adapters/network_importer_adapters.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/nautobot_device_onboarding/command_mappers/cisco_nxos.yml b/nautobot_device_onboarding/command_mappers/cisco_nxos.yml index 52dd7bff..4f59b4e1 100755 --- a/nautobot_device_onboarding/command_mappers/cisco_nxos.yml +++ b/nautobot_device_onboarding/command_mappers/cisco_nxos.yml @@ -65,4 +65,4 @@ network_importer: commands: - command: "show interface" use_textfsm: true - jpath: "[*].{interface: interface, mode: mode}" \ No newline at end of file + jpath: "[*].{interface: interface, mode: mode}" diff --git a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py index 44bf8d4d..f859261c 100644 --- 
a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py @@ -280,7 +280,7 @@ def execute_command_getter(self): self._handle_failed_devices(device_data=result) else: self.job.logger.error( - "Data returned from CommandGetter is not the correct type. " "No devices will be onboarded" + "Data returned from CommandGetter is not the correct type. No devices will be onboarded" ) raise ValidationError("Unexpected data returend from CommandGetter.") From 1cdd65f33dfdbb0612e1e6e54451c6d0f72660fe Mon Sep 17 00:00:00 2001 From: David Cates Date: Tue, 26 Mar 2024 08:43:36 -0700 Subject: [PATCH 167/225] update lock file --- poetry.lock | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/poetry.lock b/poetry.lock index 3eb6e194..2d7f8183 100755 --- a/poetry.lock +++ b/poetry.lock @@ -2405,13 +2405,13 @@ nautobot = ["nautobot (>=2.0.0,<3.0.0)"] [[package]] name = "nautobot-ssot" -version = "2.3.0" +version = "2.5.0" description = "Nautobot Single Source of Truth" optional = false -python-versions = ">=3.8,<3.12" +python-versions = "<3.12,>=3.8" files = [ - {file = "nautobot_ssot-2.3.0-py3-none-any.whl", hash = "sha256:7db10e0b80515fda56b3ec1b2f8f3bc51c674bb59b07033433d3e32ed531b0c5"}, - {file = "nautobot_ssot-2.3.0.tar.gz", hash = "sha256:63841e4db86c1f8be2e829a489dc2884be5f36082595f823f1776131a01bd4f0"}, + {file = "nautobot_ssot-2.5.0-py3-none-any.whl", hash = "sha256:41c4243d3075f61f2e1b3f23c842ee0ac7019f9985761600e7dbc62b4105cb09"}, + {file = "nautobot_ssot-2.5.0.tar.gz", hash = "sha256:57ff7a1d503641da69ec912137f3c93d772a4e95f092ab0a65da5ab7a89448c9"}, ] [package.dependencies] @@ -4534,4 +4534,4 @@ all = [] [metadata] lock-version = "2.0" python-versions = ">=3.8,<3.12" -content-hash = "1a27531e336ada6a38d6d600445f835e0821f635efe7b73b5974c4665df5bb03" +content-hash = "110bd5243e3ce25d6cd73453bf4ba0c9f1749170acf5d633dcc652553bccb9cd" From 
1271efdb23b884636bcfc7227775d9601523a8ba Mon Sep 17 00:00:00 2001 From: Susan Hooks Date: Wed, 27 Mar 2024 19:58:43 +0000 Subject: [PATCH 168/225] updates for multiple devices --- .../command_mappers/cisco_ios.yml | 15 +- .../command_mappers/cisco_nxos.yml | 5 + nautobot_device_onboarding/constants.py | 1 - nautobot_device_onboarding/utils/formatter.py | 257 +++++++++--------- pyproject.toml | 2 +- 5 files changed, 143 insertions(+), 137 deletions(-) diff --git a/nautobot_device_onboarding/command_mappers/cisco_ios.yml b/nautobot_device_onboarding/command_mappers/cisco_ios.yml index 9dcb922d..84ddb8ef 100755 --- a/nautobot_device_onboarding/command_mappers/cisco_ios.yml +++ b/nautobot_device_onboarding/command_mappers/cisco_ios.yml @@ -29,11 +29,11 @@ device_onboarding: jpath: "[?ip_address=='{{ obj }}'].prefix_length" post_processor: "{{ obj | unique | first }}" network_importer: - # serial: - # commands: - # - command: "show version" - # use_textfsm: true - # jpath: "[*].serial[0]" + serial: + commands: + - command: "show version" + use_textfsm: true + jpath: "[*].serial[0]" # interfaces: # interfaces: # commands: @@ -75,6 +75,11 @@ network_importer: - command: "show interfaces" use_textfsm: true jpath: "[*].{interface: interface, link_status: link_status}" + # mode: + # commands: + # - command: "show interfaces switchport" + # use_textfsm: true + # jpath: "[*].{interface: interface, mode: admin_mode}" # dot1q_mode: # commands: # # - command: "show interfaces switchport" diff --git a/nautobot_device_onboarding/command_mappers/cisco_nxos.yml b/nautobot_device_onboarding/command_mappers/cisco_nxos.yml index 4f59b4e1..92a648b1 100755 --- a/nautobot_device_onboarding/command_mappers/cisco_nxos.yml +++ b/nautobot_device_onboarding/command_mappers/cisco_nxos.yml @@ -26,6 +26,11 @@ device_onboarding: use_textfsm: true jpath: "[?ip_address=='{{ obj }}'].prefix_length || [`31`]" network_importer: + serial: + commands: + - command: "show inventory" + use_textfsm: true + 
jpath: "[?name=='Chassis'].sn" type: commands: - command: "show interface" diff --git a/nautobot_device_onboarding/constants.py b/nautobot_device_onboarding/constants.py index 185d45c8..73971fce 100644 --- a/nautobot_device_onboarding/constants.py +++ b/nautobot_device_onboarding/constants.py @@ -12,7 +12,6 @@ INTERFACE_TYPE_MAP_STATIC = { "Gigabit Ethernet": "1000base-t", "Ten Gigabit Ethernet": "10gbase-t", - "Forty Gigabit Ethernet": "40gbase-t", "Ethernet SVI": "virtual", "EthernetChannel": "lag", "1000/10000 Ethernet": "1000base-t", diff --git a/nautobot_device_onboarding/utils/formatter.py b/nautobot_device_onboarding/utils/formatter.py index 22f29f57..9c4247f0 100755 --- a/nautobot_device_onboarding/utils/formatter.py +++ b/nautobot_device_onboarding/utils/formatter.py @@ -8,7 +8,7 @@ from django.utils.module_loading import import_string from jdiff import extract_data_from_json from jinja2.sandbox import SandboxedEnvironment -from nautobot.dcim.models import Device +from netutils.interface import canonical_interface_name from nautobot_device_onboarding.constants import INTERFACE_TYPE_MAP_STATIC from nautobot_device_onboarding.utils.jinja_filters import fix_interfaces @@ -125,133 +125,130 @@ def map_interface_type(interface_type): return INTERFACE_TYPE_MAP_STATIC.get(interface_type, "other") -def format_ios_results(compiled_results): +def format_ios_results(device): """Format the results of the show commands for IOS devices.""" - for device, device_data in compiled_results.items(): - serial = Device.objects.get(name=device).serial - mtu_list = device_data.get("mtu", []) - type_list = device_data.get("type", []) - ip_list = device_data.get("ip_addresses", []) - prefix_list = device_data.get("prefix_length", []) - mac_list = device_data.get("mac_address", []) - description_list = device_data.get("description", []) - link_status_list = device_data.get("link_status", []) - - interface_dict = {} - for item in mtu_list: - 
interface_dict.setdefault(item["interface"], {})["mtu"] = item["mtu"] - for item in type_list: - interface_type = map_interface_type(item["type"]) - interface_dict.setdefault(item["interface"], {})["type"] = interface_type - for item in ip_list: - interface_dict.setdefault(item["interface"], {})["ip_addresses"] = {"ip_address": item["ip_address"]} - for item in prefix_list: - interface_dict.setdefault(item["interface"], {}).setdefault("ip_addresses", {})["prefix_length"] = item[ - "prefix_length" - ] - for item in mac_list: - interface_dict.setdefault(item["interface"], {})["mac_address"] = item["mac_address"] - for item in description_list: - interface_dict.setdefault(item["interface"], {})["description"] = item["description"] - for item in link_status_list: - interface_dict.setdefault(item["interface"], {})["link_status"] = ( - True if item["link_status"] == "up" else False - ) - - for interface in interface_dict.values(): - interface.setdefault("802.1Q_mode", "") - interface.setdefault("lag", "") - interface.setdefault("untagged_vlan", {}) - interface.setdefault("tagged_vlans", []) - - for interface, data in interface_dict.items(): - ip_addresses = data.get("ip_addresses", {}) - if ip_addresses: - data["ip_addresses"] = [ip_addresses] - - interface_list = [] - for interface, data in interface_dict.items(): - interface_list.append({interface: data}) - - device_data["interfaces"] = interface_list - device_data["serial"] = serial - - del device_data["mtu"] - del device_data["type"] - del device_data["ip_addresses"] - del device_data["prefix_length"] - del device_data["mac_address"] - del device_data["description"] - del device_data["link_status"] - - return compiled_results - - -def format_nxos_results(compiled_results): + serial = device.get("serial") + mtu_list = device.get("mtu", []) + type_list = device.get("type", []) + ip_list = device.get("ip_addresses", []) + prefix_list = device.get("prefix_length", []) + mac_list = device.get("mac_address", []) + 
description_list = device.get("description", []) + link_status_list = device.get("link_status", []) + + interface_dict = {} + for item in mtu_list: + interface_dict.setdefault(item["interface"], {})["mtu"] = item["mtu"] + for item in type_list: + interface_type = map_interface_type(item["type"]) + interface_dict.setdefault(item["interface"], {})["type"] = interface_type + for item in ip_list: + interface_dict.setdefault(item["interface"], {})["ip_addresses"] = {"ip_address": item["ip_address"]} + for item in prefix_list: + interface_dict.setdefault(item["interface"], {}).setdefault("ip_addresses", {})["prefix_length"] = item[ + "prefix_length" + ] + for item in mac_list: + interface_dict.setdefault(item["interface"], {})["mac_address"] = item["mac_address"] + for item in description_list: + interface_dict.setdefault(item["interface"], {})["description"] = item["description"] + for item in link_status_list: + interface_dict.setdefault(item["interface"], {})["link_status"] = True if item["link_status"] == "up" else False + for interface in interface_dict.values(): + interface.setdefault("802.1Q_mode", "") + interface.setdefault("lag", "") + interface.setdefault("untagged_vlan", {}) + interface.setdefault("tagged_vlans", []) + + for interface, data in interface_dict.items(): + ip_addresses = data.get("ip_addresses", {}) + if ip_addresses: + data["ip_addresses"] = [ip_addresses] + + interface_list = [] + for interface, data in interface_dict.items(): + interface_list.append({canonical_interface_name(interface): data}) + + device["interfaces"] = interface_list + device["serial"] = serial + try: + del device["mtu"] + del device["type"] + del device["ip_addresses"] + del device["prefix_length"] + del device["mac_address"] + del device["description"] + del device["link_status"] + + except KeyError: + pass + + return device + + +def format_nxos_results(device): """Format the results of the show commands for NX-OS devices.""" - for device, device_data in 
compiled_results.items(): - serial = Device.objects.get(name=device).serial - mtu_list = device_data.get("mtu", []) - type_list = device_data.get("type", []) - ip_list = device_data.get("ip_addresses", []) - prefix_list = device_data.get("prefix_length", []) - mac_list = device_data.get("mac_address", []) - description_list = device_data.get("description", []) - link_status_list = device_data.get("link_status", []) - mode_list = device_data.get("mode", []) - interface_dict = {} - for item in mtu_list: - interface_dict.setdefault(item["interface"], {})["mtu"] = item["mtu"] - for item in type_list: - interface_type = map_interface_type(item["type"]) - interface_dict.setdefault(item["interface"], {})["type"] = interface_type - for item in ip_list: - interface_dict.setdefault(item["interface"], {})["ip_addresses"] = {"ip_address": item["ip_address"]} - for item in prefix_list: - interface_dict.setdefault(item["interface"], {}).setdefault("ip_addresses", {})["prefix_length"] = item[ - "prefix_length" - ] - for item in mac_list: - interface_dict.setdefault(item["interface"], {})["mac_address"] = item["mac_address"] - for item in description_list: - interface_dict.setdefault(item["interface"], {})["description"] = item["description"] - for item in link_status_list: - interface_dict.setdefault(item["interface"], {})["link_status"] = ( - True if item["link_status"] == "up" else False - ) - for item in mode_list: - interface_dict.setdefault(item["interface"], {})["802.1Q_mode"] = ( - "access" if item["mode"] == "access" else "tagged" if item["mode"] == "trunk" else "" - ) - - for interface in interface_dict.values(): - interface.setdefault("802.1Q_mode", "") - interface.setdefault("lag", "") - interface.setdefault("untagged_vlan", {}) - interface.setdefault("tagged_vlans", []) - - for interface, data in interface_dict.items(): - ip_addresses = data.get("ip_addresses", {}) - if ip_addresses: - data["ip_addresses"] = [ip_addresses] - - interface_list = [] - for interface, data 
in interface_dict.items(): - interface_list.append({interface: data}) - - device_data["interfaces"] = interface_list - device_data["serial"] = serial - - del device_data["mtu"] - del device_data["type"] - del device_data["ip_addresses"] - del device_data["prefix_length"] - del device_data["mac_address"] - del device_data["description"] - del device_data["link_status"] - del device_data["mode"] - - return compiled_results + serial = device.get("serial") + mtu_list = device.get("mtu", []) + type_list = device.get("type", []) + ip_list = device.get("ip_addresses", []) + prefix_list = device.get("prefix_length", []) + mac_list = device.get("mac_address", []) + description_list = device.get("description", []) + link_status_list = device.get("link_status", []) + mode_list = device.get("mode", []) + + interface_dict = {} + for item in mtu_list: + interface_dict.setdefault(item["interface"], {})["mtu"] = item["mtu"] + for item in type_list: + interface_type = map_interface_type(item["type"]) + interface_dict.setdefault(item["interface"], {})["type"] = interface_type + for item in ip_list: + interface_dict.setdefault(item["interface"], {})["ip_addresses"] = {"ip_address": item["ip_address"]} + for item in prefix_list: + interface_dict.setdefault(item["interface"], {}).setdefault("ip_addresses", {})["prefix_length"] = item[ + "prefix_length" + ] + for item in mac_list: + interface_dict.setdefault(item["interface"], {})["mac_address"] = item["mac_address"] + for item in description_list: + interface_dict.setdefault(item["interface"], {})["description"] = item["description"] + for item in link_status_list: + interface_dict.setdefault(item["interface"], {})["link_status"] = True if item["link_status"] == "up" else False + for item in mode_list: + interface_dict.setdefault(item["interface"], {})["802.1Q_mode"] = ( + "access" if item["mode"] == "access" else "tagged" if item["mode"] == "trunk" else "" + ) + + for interface in interface_dict.values(): + # 
interface.setdefault("802.1Q_mode", "") + interface.setdefault("lag", "") + interface.setdefault("untagged_vlan", {}) + interface.setdefault("tagged_vlans", []) + + for interface, data in interface_dict.items(): + ip_addresses = data.get("ip_addresses", {}) + if ip_addresses: + data["ip_addresses"] = [ip_addresses] + + interface_list = [] + for interface, data in interface_dict.items(): + interface_list.append({canonical_interface_name(interface): data}) + + device["interfaces"] = interface_list + device["serial"] = serial + + del device["mtu"] + del device["type"] + del device["ip_addresses"] + del device["prefix_length"] + del device["mac_address"] + del device["description"] + del device["link_status"] + del device["mode"] + + return device def format_junos_results(compiled_results): @@ -266,16 +263,16 @@ def format_results(compiled_results): compiled_results (dict): The compiled results from the Nornir task. Returns: - dict: The formatted results. + compiled_results (dict): The formatted results. """ for device in compiled_results: platform = compiled_results[device]["platform"] if platform in ["cisco_ios", "cisco_xe"]: - format_ios_results(compiled_results) + format_ios_results(compiled_results[device]) elif platform == "cisco_nxos": - format_nxos_results(compiled_results) + format_nxos_results(compiled_results[device]) elif platform == "juniper_junos": - format_junos_results(compiled_results) + format_junos_results(compiled_results[device]) else: raise ValueError(f"Unsupported platform {platform}") diff --git a/pyproject.toml b/pyproject.toml index d98d8826..91f0b94a 100755 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "nautobot-device-onboarding" -version = "3.0.2a8" +version = "3.0.2a9" description = "A app for Nautobot to easily onboard new devices." 
authors = ["Network to Code, LLC "] license = "Apache-2.0" From 5b5eb26b7c9a098720e949020c20ede4bfb0a337 Mon Sep 17 00:00:00 2001 From: David Cates Date: Wed, 27 Mar 2024 13:13:14 -0700 Subject: [PATCH 169/225] update mac address conversion --- .../diffsync/adapters/network_importer_adapters.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py index f859261c..d21a0bd1 100644 --- a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py @@ -51,9 +51,9 @@ class NetworkImporterNautobotAdapter(FilteredNautobotAdapter): def load_param_mac_address(self, parameter_name, database_object): """Convert interface mac_address to string.""" - if self.job.debug: - self.job.logger.debug(f"Converting {parameter_name}: {database_object.mac_address}") - return str(database_object.mac_address) + if database_object.mac_address: + return str(database_object.mac_address) + return "" def load_param_untagged_vlan__name(self, parameter_name, database_object): """Load, or prevent loading, untagged vlans depending on form selection.""" @@ -288,7 +288,7 @@ def _process_mac_address(self, mac_address): """Convert a mac address to match the value stored by Nautobot.""" if mac_address: return str(EUI(mac_address, version=48, dialect=MacUnixExpandedUppercase)) - return "None" + return "" def load_devices(self): """Load devices into the DiffSync store.""" From 2ba0a6bcf26714b1e3f6a82cbb7674bae66a5b21 Mon Sep 17 00:00:00 2001 From: David Cates Date: Wed, 27 Mar 2024 15:00:28 -0700 Subject: [PATCH 170/225] add cache for primary ips to NI adapter --- .../adapters/network_importer_adapters.py | 25 +++++++++++++++++++ 1 file changed, 25 insertions(+) diff --git a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py 
b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py index d21a0bd1..0867c571 100644 --- a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py @@ -39,6 +39,8 @@ class NetworkImporterNautobotAdapter(FilteredNautobotAdapter): untagged_vlan_to_interface = network_importer_models.NetworkImporterUnTaggedVlanToInterface lag_to_interface = network_importer_models.NetworkImporterLagToInterface + primary_ips = None + top_level = [ "ip_address", "vlan", @@ -49,6 +51,17 @@ class NetworkImporterNautobotAdapter(FilteredNautobotAdapter): "lag_to_interface", ] + def _cache_primary_ips(self, device_queryset): + """ + Create a cache of primary ip address for devices. + + If the primary ip address of a device is unset due to the deletion + of an interface, this cache is used to reset it. + """ + self.primary_ips = {} + for device in device_queryset: + self.primary_ips[device.id] = device.primary_ip.id + def load_param_mac_address(self, parameter_name, database_object): """Convert interface mac_address to string.""" if database_object.mac_address: @@ -188,6 +201,8 @@ def load(self): """Generic implementation of the load function.""" if not hasattr(self, "top_level") or not self.top_level: raise ValueError("'top_level' needs to be set on the class.") + + self._cache_primary_ips(device_queryset=self.job.devices_to_load) for model_name in self.top_level: if model_name == "ip_address": @@ -207,6 +222,16 @@ def load(self): diffsync_model = self._get_diffsync_class(model_name) self._load_objects(diffsync_model) + def sync_complete(self, source: diffsync.DiffSync, *args, **kwargs): + for device in self.job.devices_to_load: + if not device.primary_ip: + try: + ip_address = IPAddress.objects.get(id=self.primary_ips[device.id]) + device.primary_ip = ip_address + device.validated_save() + except Exception as err: + self.job.logger.error(f"Unable to set Primary IP for 
{device.name}, {err.args}") + return super().sync_complete(source, *args, **kwargs) class MacUnixExpandedUppercase(mac_unix_expanded): """Mac Unix Expanded Uppercase.""" From 984da9aaec60f92f7923adbecf314a4b5cac5bf5 Mon Sep 17 00:00:00 2001 From: David Cates Date: Wed, 27 Mar 2024 15:12:26 -0700 Subject: [PATCH 171/225] update sync complete --- .../diffsync/adapters/network_importer_adapters.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py index 0867c571..a61882d9 100644 --- a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py @@ -227,7 +227,7 @@ def sync_complete(self, source: diffsync.DiffSync, *args, **kwargs): if not device.primary_ip: try: ip_address = IPAddress.objects.get(id=self.primary_ips[device.id]) - device.primary_ip = ip_address + device.primary_ip4 = ip_address device.validated_save() except Exception as err: self.job.logger.error(f"Unable to set Primary IP for {device.name}, {err.args}") From 5a436de15fe4c8a9ae9ab27ecb5349654edf0ff3 Mon Sep 17 00:00:00 2001 From: David Cates Date: Wed, 27 Mar 2024 17:40:12 -0700 Subject: [PATCH 172/225] update sync complete --- .../diffsync/adapters/network_importer_adapters.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py index a61882d9..d5976446 100644 --- a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py @@ -222,16 +222,20 @@ def load(self): diffsync_model = self._get_diffsync_class(model_name) self._load_objects(diffsync_model) - def sync_complete(self, source: 
diffsync.DiffSync, *args, **kwargs): - for device in self.job.devices_to_load: + def sync_complete(self, source, diff, *args, **kwargs): + for device in self.job.devices_to_load.all(): # refresh queryset after sync is complete + if self.job.debug: + self.job.logger.debug("Sync Complete method called, checking for missing primary ip addresses...") if not device.primary_ip: try: ip_address = IPAddress.objects.get(id=self.primary_ips[device.id]) device.primary_ip4 = ip_address device.validated_save() + self.job.logger.info(f"Assigning {ip_address} as primary IP Address for Device: {device.name}") except Exception as err: - self.job.logger.error(f"Unable to set Primary IP for {device.name}, {err.args}") - return super().sync_complete(source, *args, **kwargs) + self.job.logger.error(f"Unable to set Primary IP for {device.name}, {err.args}. " + "Please check the primary IP Address assignment for this device.") + return super().sync_complete(source, diff, *args, **kwargs) class MacUnixExpandedUppercase(mac_unix_expanded): """Mac Unix Expanded Uppercase.""" From 105dc536c69b9c9d83246eea321e9164d5b70dfb Mon Sep 17 00:00:00 2001 From: David Cates Date: Thu, 28 Mar 2024 08:23:42 -0700 Subject: [PATCH 173/225] update sync complete --- .../adapters/network_importer_adapters.py | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py index d5976446..3913f858 100644 --- a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py @@ -227,6 +227,7 @@ def sync_complete(self, source, diff, *args, **kwargs): if self.job.debug: self.job.logger.debug("Sync Complete method called, checking for missing primary ip addresses...") if not device.primary_ip: + ip_address = "" try: ip_address = 
IPAddress.objects.get(id=self.primary_ips[device.id]) device.primary_ip4 = ip_address @@ -235,6 +236,20 @@ def sync_complete(self, source, diff, *args, **kwargs): except Exception as err: self.job.logger.error(f"Unable to set Primary IP for {device.name}, {err.args}. " "Please check the primary IP Address assignment for this device.") + if ip_address: + try: + interface = Interface.objects.get( + device=device, + ip_addresses__in=[ip_address] + ) + interface.mgmt_only = True + interface.validated_save() + self.job.logger.info(f"Management only set for interface: {interface.name} on device: {device.name}") + except Exception as err: + self.job.logger.error("Failed to set management only on the " + f"management interface for {device.name}, {err}, {err.args}") + else: + self.job.logger.error(f"Failed to set management only on the managmeent interface for {device.name}") return super().sync_complete(source, diff, *args, **kwargs) class MacUnixExpandedUppercase(mac_unix_expanded): From 06397f1451f22023c095e6815332123ebf28bc46 Mon Sep 17 00:00:00 2001 From: David Cates Date: Thu, 28 Mar 2024 08:33:15 -0700 Subject: [PATCH 174/225] add sync complete doc string --- .../diffsync/adapters/network_importer_adapters.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py index 3913f858..57d82fd2 100644 --- a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py @@ -223,6 +223,15 @@ def load(self): self._load_objects(diffsync_model) def sync_complete(self, source, diff, *args, **kwargs): + """ + Assign the primary ip address to a device and update the management interface setting. + + Syncing interfaces may result in the deletion of the original management interface. 
If + this happens, the primary IP Address for the device should be set and the management only + option on the appropriate interface should be set to True. + + This method only runs if data was changed. + """ for device in self.job.devices_to_load.all(): # refresh queryset after sync is complete if self.job.debug: self.job.logger.debug("Sync Complete method called, checking for missing primary ip addresses...") From 823c87d6e5e4f728cc666b1f9d47d302c2df845a Mon Sep 17 00:00:00 2001 From: Susan Hooks Date: Thu, 28 Mar 2024 16:32:24 +0000 Subject: [PATCH 175/225] updated interface mapping --- nautobot_device_onboarding/constants.py | 3 +- nautobot_device_onboarding/utils/formatter.py | 30 +++++++++++-------- 2 files changed, 19 insertions(+), 14 deletions(-) diff --git a/nautobot_device_onboarding/constants.py b/nautobot_device_onboarding/constants.py index 73971fce..5f9e308f 100644 --- a/nautobot_device_onboarding/constants.py +++ b/nautobot_device_onboarding/constants.py @@ -13,8 +13,9 @@ "Gigabit Ethernet": "1000base-t", "Ten Gigabit Ethernet": "10gbase-t", "Ethernet SVI": "virtual", - "EthernetChannel": "lag", + "EtherChannel": "lag", "1000/10000 Ethernet": "1000base-t", "Port-channel": "lag", "EtherSVI": "virtual", + "FastEthernet": "100base-fx", } diff --git a/nautobot_device_onboarding/utils/formatter.py b/nautobot_device_onboarding/utils/formatter.py index 9c4247f0..ec43b3c4 100755 --- a/nautobot_device_onboarding/utils/formatter.py +++ b/nautobot_device_onboarding/utils/formatter.py @@ -165,9 +165,10 @@ def format_ios_results(device): if ip_addresses: data["ip_addresses"] = [ip_addresses] + # Convert interface names to canonical form interface_list = [] - for interface, data in interface_dict.items(): - interface_list.append({canonical_interface_name(interface): data}) + for interface_name, interface_info in interface_dict.items(): + interface_list.append({canonical_interface_name(interface_name): interface_info}) device["interfaces"] = interface_list 
device["serial"] = serial @@ -232,21 +233,24 @@ def format_nxos_results(device): if ip_addresses: data["ip_addresses"] = [ip_addresses] + # Convert interface names to canonical form interface_list = [] - for interface, data in interface_dict.items(): - interface_list.append({canonical_interface_name(interface): data}) + for interface_name, interface_info in interface_dict.items(): + interface_list.append({canonical_interface_name(interface_name): interface_info}) device["interfaces"] = interface_list device["serial"] = serial - - del device["mtu"] - del device["type"] - del device["ip_addresses"] - del device["prefix_length"] - del device["mac_address"] - del device["description"] - del device["link_status"] - del device["mode"] + try: + del device["mtu"] + del device["type"] + del device["ip_addresses"] + del device["prefix_length"] + del device["mac_address"] + del device["description"] + del device["link_status"] + del device["mode"] + except KeyError: + pass return device From a19676a7d70030d3460ed9596b1f6c9ad5a7b96f Mon Sep 17 00:00:00 2001 From: Susan Hooks Date: Thu, 28 Mar 2024 17:02:22 +0000 Subject: [PATCH 176/225] bump --- poetry.lock | 615 ++++++++++++++++++++----------------------------- pyproject.toml | 2 +- 2 files changed, 246 insertions(+), 371 deletions(-) diff --git a/poetry.lock b/poetry.lock index 2d7f8183..dcbb8e9a 100755 --- a/poetry.lock +++ b/poetry.lock @@ -60,13 +60,13 @@ files = [ [[package]] name = "asgiref" -version = "3.7.2" +version = "3.8.1" description = "ASGI specs, helper code, and adapters" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "asgiref-3.7.2-py3-none-any.whl", hash = "sha256:89b2ef2247e3b562a16eef663bc0e2e703ec6468e2fa8a5cd61cd449786d4f6e"}, - {file = "asgiref-3.7.2.tar.gz", hash = "sha256:9e0ce3aa93a819ba5b45120216b23878cf6e8525eb3848653452b4192b92afed"}, + {file = "asgiref-3.8.1-py3-none-any.whl", hash = 
"sha256:3e1e3ecc849832fe52ccf2cb6686b7a55f82bb1d6aee72a58826471390335e47"}, + {file = "asgiref-3.8.1.tar.gz", hash = "sha256:c343bd80a0bec947a9860adb4c432ffa7db769836c64238fc34bdc3fec84d590"}, ] [package.dependencies] @@ -77,22 +77,17 @@ tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"] [[package]] name = "astroid" -version = "2.15.8" +version = "3.1.0" description = "An abstract syntax tree for Python with inference support." optional = false -python-versions = ">=3.7.2" +python-versions = ">=3.8.0" files = [ - {file = "astroid-2.15.8-py3-none-any.whl", hash = "sha256:1aa149fc5c6589e3d0ece885b4491acd80af4f087baafa3fb5203b113e68cd3c"}, - {file = "astroid-2.15.8.tar.gz", hash = "sha256:6c107453dffee9055899705de3c9ead36e74119cee151e5a9aaf7f0b0e020a6a"}, + {file = "astroid-3.1.0-py3-none-any.whl", hash = "sha256:951798f922990137ac090c53af473db7ab4e70c770e6d7fae0cec59f74411819"}, + {file = "astroid-3.1.0.tar.gz", hash = "sha256:ac248253bfa4bd924a0de213707e7ebeeb3138abeb48d798784ead1e56d419d4"}, ] [package.dependencies] -lazy-object-proxy = ">=1.4.0" typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""} -wrapt = [ - {version = ">=1.11,<2", markers = "python_version < \"3.11\""}, - {version = ">=1.14,<2", markers = "python_version >= \"3.11\""}, -] [[package]] name = "asttokens" @@ -216,13 +211,13 @@ tzdata = ["tzdata"] [[package]] name = "bandit" -version = "1.7.7" +version = "1.7.8" description = "Security oriented static analyser for python code." 
optional = false python-versions = ">=3.8" files = [ - {file = "bandit-1.7.7-py3-none-any.whl", hash = "sha256:17e60786a7ea3c9ec84569fd5aee09936d116cb0cb43151023258340dbffb7ed"}, - {file = "bandit-1.7.7.tar.gz", hash = "sha256:527906bec6088cb499aae31bc962864b4e77569e9d529ee51df3a93b4b8ab28a"}, + {file = "bandit-1.7.8-py3-none-any.whl", hash = "sha256:509f7af645bc0cd8fd4587abc1a038fc795636671ee8204d502b933aee44f381"}, + {file = "bandit-1.7.8.tar.gz", hash = "sha256:36de50f720856ab24a24dbaa5fee2c66050ed97c1477e0a1159deab1775eab6b"}, ] [package.dependencies] @@ -233,6 +228,7 @@ stevedore = ">=1.20.0" [package.extras] baseline = ["GitPython (>=3.1.30)"] +sarif = ["jschema-to-python (>=1.2.3)", "sarif-om (>=1.0.4)"] test = ["beautifulsoup4 (>=4.8.0)", "coverage (>=4.5.4)", "fixtures (>=3.0.0)", "flake8 (>=4.0.0)", "pylint (==1.9.4)", "stestr (>=2.5.0)", "testscenarios (>=0.5.0)", "testtools (>=2.3.0)"] toml = ["tomli (>=1.1.0)"] yaml = ["PyYAML"] @@ -290,33 +286,33 @@ files = [ [[package]] name = "black" -version = "24.2.0" +version = "24.3.0" description = "The uncompromising code formatter." 
optional = false python-versions = ">=3.8" files = [ - {file = "black-24.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6981eae48b3b33399c8757036c7f5d48a535b962a7c2310d19361edeef64ce29"}, - {file = "black-24.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d533d5e3259720fdbc1b37444491b024003e012c5173f7d06825a77508085430"}, - {file = "black-24.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61a0391772490ddfb8a693c067df1ef5227257e72b0e4108482b8d41b5aee13f"}, - {file = "black-24.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:992e451b04667116680cb88f63449267c13e1ad134f30087dec8527242e9862a"}, - {file = "black-24.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:163baf4ef40e6897a2a9b83890e59141cc8c2a98f2dda5080dc15c00ee1e62cd"}, - {file = "black-24.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e37c99f89929af50ffaf912454b3e3b47fd64109659026b678c091a4cd450fb2"}, - {file = "black-24.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9de21bafcba9683853f6c96c2d515e364aee631b178eaa5145fc1c61a3cc92"}, - {file = "black-24.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:9db528bccb9e8e20c08e716b3b09c6bdd64da0dd129b11e160bf082d4642ac23"}, - {file = "black-24.2.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d84f29eb3ee44859052073b7636533ec995bd0f64e2fb43aeceefc70090e752b"}, - {file = "black-24.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1e08fb9a15c914b81dd734ddd7fb10513016e5ce7e6704bdd5e1251ceee51ac9"}, - {file = "black-24.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:810d445ae6069ce64030c78ff6127cd9cd178a9ac3361435708b907d8a04c693"}, - {file = "black-24.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:ba15742a13de85e9b8f3239c8f807723991fbfae24bad92d34a2b12e81904982"}, - {file = "black-24.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7e53a8c630f71db01b28cd9602a1ada68c937cbf2c333e6ed041390d6968faf4"}, - {file = 
"black-24.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:93601c2deb321b4bad8f95df408e3fb3943d85012dddb6121336b8e24a0d1218"}, - {file = "black-24.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0057f800de6acc4407fe75bb147b0c2b5cbb7c3ed110d3e5999cd01184d53b0"}, - {file = "black-24.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:faf2ee02e6612577ba0181f4347bcbcf591eb122f7841ae5ba233d12c39dcb4d"}, - {file = "black-24.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:057c3dc602eaa6fdc451069bd027a1b2635028b575a6c3acfd63193ced20d9c8"}, - {file = "black-24.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:08654d0797e65f2423f850fc8e16a0ce50925f9337fb4a4a176a7aa4026e63f8"}, - {file = "black-24.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca610d29415ee1a30a3f30fab7a8f4144e9d34c89a235d81292a1edb2b55f540"}, - {file = "black-24.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:4dd76e9468d5536abd40ffbc7a247f83b2324f0c050556d9c371c2b9a9a95e31"}, - {file = "black-24.2.0-py3-none-any.whl", hash = "sha256:e8a6ae970537e67830776488bca52000eaa37fa63b9988e8c487458d9cd5ace6"}, - {file = "black-24.2.0.tar.gz", hash = "sha256:bce4f25c27c3435e4dace4815bcb2008b87e167e3bf4ee47ccdc5ce906eb4894"}, + {file = "black-24.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7d5e026f8da0322b5662fa7a8e752b3fa2dac1c1cbc213c3d7ff9bdd0ab12395"}, + {file = "black-24.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9f50ea1132e2189d8dff0115ab75b65590a3e97de1e143795adb4ce317934995"}, + {file = "black-24.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2af80566f43c85f5797365077fb64a393861a3730bd110971ab7a0c94e873e7"}, + {file = "black-24.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:4be5bb28e090456adfc1255e03967fb67ca846a03be7aadf6249096100ee32d0"}, + {file = "black-24.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4f1373a7808a8f135b774039f61d59e4be7eb56b2513d3d2f02a8b9365b8a8a9"}, + {file = 
"black-24.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:aadf7a02d947936ee418777e0247ea114f78aff0d0959461057cae8a04f20597"}, + {file = "black-24.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65c02e4ea2ae09d16314d30912a58ada9a5c4fdfedf9512d23326128ac08ac3d"}, + {file = "black-24.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:bf21b7b230718a5f08bd32d5e4f1db7fc8788345c8aea1d155fc17852b3410f5"}, + {file = "black-24.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:2818cf72dfd5d289e48f37ccfa08b460bf469e67fb7c4abb07edc2e9f16fb63f"}, + {file = "black-24.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4acf672def7eb1725f41f38bf6bf425c8237248bb0804faa3965c036f7672d11"}, + {file = "black-24.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c7ed6668cbbfcd231fa0dc1b137d3e40c04c7f786e626b405c62bcd5db5857e4"}, + {file = "black-24.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:56f52cfbd3dabe2798d76dbdd299faa046a901041faf2cf33288bc4e6dae57b5"}, + {file = "black-24.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:79dcf34b33e38ed1b17434693763301d7ccbd1c5860674a8f871bd15139e7837"}, + {file = "black-24.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e19cb1c6365fd6dc38a6eae2dcb691d7d83935c10215aef8e6c38edee3f77abd"}, + {file = "black-24.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65b76c275e4c1c5ce6e9870911384bff5ca31ab63d19c76811cb1fb162678213"}, + {file = "black-24.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:b5991d523eee14756f3c8d5df5231550ae8993e2286b8014e2fdea7156ed0959"}, + {file = "black-24.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c45f8dff244b3c431b36e3224b6be4a127c6aca780853574c00faf99258041eb"}, + {file = "black-24.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6905238a754ceb7788a73f02b45637d820b2f5478b20fec82ea865e4f5d4d9f7"}, + {file = "black-24.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:d7de8d330763c66663661a1ffd432274a2f92f07feeddd89ffd085b5744f85e7"}, + {file = "black-24.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:7bb041dca0d784697af4646d3b62ba4a6b028276ae878e53f6b4f74ddd6db99f"}, + {file = "black-24.3.0-py3-none-any.whl", hash = "sha256:41622020d7120e01d377f74249e677039d20e6344ff5851de8a10f11f513bf93"}, + {file = "black-24.3.0.tar.gz", hash = "sha256:a0c9c4a0771afc6919578cec71ce82a3e31e054904e7197deacbc9382671c41f"}, ] [package.dependencies] @@ -597,13 +593,13 @@ test = ["pytest"] [[package]] name = "click-didyoumean" -version = "0.3.0" +version = "0.3.1" description = "Enables git-like *did-you-mean* feature in click" optional = false -python-versions = ">=3.6.2,<4.0.0" +python-versions = ">=3.6.2" files = [ - {file = "click-didyoumean-0.3.0.tar.gz", hash = "sha256:f184f0d851d96b6d29297354ed981b7dd71df7ff500d82fa6d11f0856bee8035"}, - {file = "click_didyoumean-0.3.0-py3-none-any.whl", hash = "sha256:a0713dc7a1de3f06bc0df5a9567ad19ead2d3d5689b434768a6145bff77c0667"}, + {file = "click_didyoumean-0.3.1-py3-none-any.whl", hash = "sha256:5c4bb6007cfea5f2fd6583a2fb6701a22a41eb98957e63d0fac41c10e7c3117c"}, + {file = "click_didyoumean-0.3.1.tar.gz", hash = "sha256:4f82fdff0dbe64ef8ab2279bd6aa3f6a99c3b28c05aa09cbfc07c9d7fbb5a463"}, ] [package.dependencies] @@ -657,63 +653,63 @@ files = [ [[package]] name = "coverage" -version = "7.4.3" +version = "7.4.4" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.8" files = [ - {file = "coverage-7.4.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8580b827d4746d47294c0e0b92854c85a92c2227927433998f0d3320ae8a71b6"}, - {file = "coverage-7.4.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:718187eeb9849fc6cc23e0d9b092bc2348821c5e1a901c9f8975df0bc785bfd4"}, - {file = "coverage-7.4.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:767b35c3a246bcb55b8044fd3a43b8cd553dd1f9f2c1eeb87a302b1f8daa0524"}, - {file = 
"coverage-7.4.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae7f19afe0cce50039e2c782bff379c7e347cba335429678450b8fe81c4ef96d"}, - {file = "coverage-7.4.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba3a8aaed13770e970b3df46980cb068d1c24af1a1968b7818b69af8c4347efb"}, - {file = "coverage-7.4.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ee866acc0861caebb4f2ab79f0b94dbfbdbfadc19f82e6e9c93930f74e11d7a0"}, - {file = "coverage-7.4.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:506edb1dd49e13a2d4cac6a5173317b82a23c9d6e8df63efb4f0380de0fbccbc"}, - {file = "coverage-7.4.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd6545d97c98a192c5ac995d21c894b581f1fd14cf389be90724d21808b657e2"}, - {file = "coverage-7.4.3-cp310-cp310-win32.whl", hash = "sha256:f6a09b360d67e589236a44f0c39218a8efba2593b6abdccc300a8862cffc2f94"}, - {file = "coverage-7.4.3-cp310-cp310-win_amd64.whl", hash = "sha256:18d90523ce7553dd0b7e23cbb28865db23cddfd683a38fb224115f7826de78d0"}, - {file = "coverage-7.4.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cbbe5e739d45a52f3200a771c6d2c7acf89eb2524890a4a3aa1a7fa0695d2a47"}, - {file = "coverage-7.4.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:489763b2d037b164846ebac0cbd368b8a4ca56385c4090807ff9fad817de4113"}, - {file = "coverage-7.4.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:451f433ad901b3bb00184d83fd83d135fb682d780b38af7944c9faeecb1e0bfe"}, - {file = "coverage-7.4.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fcc66e222cf4c719fe7722a403888b1f5e1682d1679bd780e2b26c18bb648cdc"}, - {file = "coverage-7.4.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3ec74cfef2d985e145baae90d9b1b32f85e1741b04cd967aaf9cfa84c1334f3"}, - {file = 
"coverage-7.4.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:abbbd8093c5229c72d4c2926afaee0e6e3140de69d5dcd918b2921f2f0c8baba"}, - {file = "coverage-7.4.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:35eb581efdacf7b7422af677b92170da4ef34500467381e805944a3201df2079"}, - {file = "coverage-7.4.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8249b1c7334be8f8c3abcaaa996e1e4927b0e5a23b65f5bf6cfe3180d8ca7840"}, - {file = "coverage-7.4.3-cp311-cp311-win32.whl", hash = "sha256:cf30900aa1ba595312ae41978b95e256e419d8a823af79ce670835409fc02ad3"}, - {file = "coverage-7.4.3-cp311-cp311-win_amd64.whl", hash = "sha256:18c7320695c949de11a351742ee001849912fd57e62a706d83dfc1581897fa2e"}, - {file = "coverage-7.4.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b51bfc348925e92a9bd9b2e48dad13431b57011fd1038f08316e6bf1df107d10"}, - {file = "coverage-7.4.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d6cdecaedea1ea9e033d8adf6a0ab11107b49571bbb9737175444cea6eb72328"}, - {file = "coverage-7.4.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b2eccb883368f9e972e216c7b4c7c06cabda925b5f06dde0650281cb7666a30"}, - {file = "coverage-7.4.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6c00cdc8fa4e50e1cc1f941a7f2e3e0f26cb2a1233c9696f26963ff58445bac7"}, - {file = "coverage-7.4.3-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9a4a8dd3dcf4cbd3165737358e4d7dfbd9d59902ad11e3b15eebb6393b0446e"}, - {file = "coverage-7.4.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:062b0a75d9261e2f9c6d071753f7eef0fc9caf3a2c82d36d76667ba7b6470003"}, - {file = "coverage-7.4.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:ebe7c9e67a2d15fa97b77ea6571ce5e1e1f6b0db71d1d5e96f8d2bf134303c1d"}, - {file = "coverage-7.4.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:c0a120238dd71c68484f02562f6d446d736adcc6ca0993712289b102705a9a3a"}, - {file = "coverage-7.4.3-cp312-cp312-win32.whl", hash = "sha256:37389611ba54fd6d278fde86eb2c013c8e50232e38f5c68235d09d0a3f8aa352"}, - {file = "coverage-7.4.3-cp312-cp312-win_amd64.whl", hash = "sha256:d25b937a5d9ffa857d41be042b4238dd61db888533b53bc76dc082cb5a15e914"}, - {file = "coverage-7.4.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:28ca2098939eabab044ad68850aac8f8db6bf0b29bc7f2887d05889b17346454"}, - {file = "coverage-7.4.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:280459f0a03cecbe8800786cdc23067a8fc64c0bd51dc614008d9c36e1659d7e"}, - {file = "coverage-7.4.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c0cdedd3500e0511eac1517bf560149764b7d8e65cb800d8bf1c63ebf39edd2"}, - {file = "coverage-7.4.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9a9babb9466fe1da12417a4aed923e90124a534736de6201794a3aea9d98484e"}, - {file = "coverage-7.4.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dec9de46a33cf2dd87a5254af095a409ea3bf952d85ad339751e7de6d962cde6"}, - {file = "coverage-7.4.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:16bae383a9cc5abab9bb05c10a3e5a52e0a788325dc9ba8499e821885928968c"}, - {file = "coverage-7.4.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:2c854ce44e1ee31bda4e318af1dbcfc929026d12c5ed030095ad98197eeeaed0"}, - {file = "coverage-7.4.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ce8c50520f57ec57aa21a63ea4f325c7b657386b3f02ccaedeccf9ebe27686e1"}, - {file = "coverage-7.4.3-cp38-cp38-win32.whl", hash = "sha256:708a3369dcf055c00ddeeaa2b20f0dd1ce664eeabde6623e516c5228b753654f"}, - {file = "coverage-7.4.3-cp38-cp38-win_amd64.whl", hash = "sha256:1bf25fbca0c8d121a3e92a2a0555c7e5bc981aee5c3fdaf4bb7809f410f696b9"}, - {file = "coverage-7.4.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:3b253094dbe1b431d3a4ac2f053b6d7ede2664ac559705a704f621742e034f1f"}, - {file = "coverage-7.4.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:77fbfc5720cceac9c200054b9fab50cb2a7d79660609200ab83f5db96162d20c"}, - {file = "coverage-7.4.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6679060424faa9c11808598504c3ab472de4531c571ab2befa32f4971835788e"}, - {file = "coverage-7.4.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4af154d617c875b52651dd8dd17a31270c495082f3d55f6128e7629658d63765"}, - {file = "coverage-7.4.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8640f1fde5e1b8e3439fe482cdc2b0bb6c329f4bb161927c28d2e8879c6029ee"}, - {file = "coverage-7.4.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:69b9f6f66c0af29642e73a520b6fed25ff9fd69a25975ebe6acb297234eda501"}, - {file = "coverage-7.4.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:0842571634f39016a6c03e9d4aba502be652a6e4455fadb73cd3a3a49173e38f"}, - {file = "coverage-7.4.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a78ed23b08e8ab524551f52953a8a05d61c3a760781762aac49f8de6eede8c45"}, - {file = "coverage-7.4.3-cp39-cp39-win32.whl", hash = "sha256:c0524de3ff096e15fcbfe8f056fdb4ea0bf497d584454f344d59fce069d3e6e9"}, - {file = "coverage-7.4.3-cp39-cp39-win_amd64.whl", hash = "sha256:0209a6369ccce576b43bb227dc8322d8ef9e323d089c6f3f26a597b09cb4d2aa"}, - {file = "coverage-7.4.3-pp38.pp39.pp310-none-any.whl", hash = "sha256:7cbde573904625509a3f37b6fecea974e363460b556a627c60dc2f47e2fffa51"}, - {file = "coverage-7.4.3.tar.gz", hash = "sha256:276f6077a5c61447a48d133ed13e759c09e62aff0dc84274a68dc18660104d52"}, + {file = "coverage-7.4.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0be5efd5127542ef31f165de269f77560d6cdef525fffa446de6f7e9186cfb2"}, + {file = "coverage-7.4.4-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:ccd341521be3d1b3daeb41960ae94a5e87abe2f46f17224ba5d6f2b8398016cf"}, + {file = "coverage-7.4.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09fa497a8ab37784fbb20ab699c246053ac294d13fc7eb40ec007a5043ec91f8"}, + {file = "coverage-7.4.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b1a93009cb80730c9bca5d6d4665494b725b6e8e157c1cb7f2db5b4b122ea562"}, + {file = "coverage-7.4.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:690db6517f09336559dc0b5f55342df62370a48f5469fabf502db2c6d1cffcd2"}, + {file = "coverage-7.4.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:09c3255458533cb76ef55da8cc49ffab9e33f083739c8bd4f58e79fecfe288f7"}, + {file = "coverage-7.4.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8ce1415194b4a6bd0cdcc3a1dfbf58b63f910dcb7330fe15bdff542c56949f87"}, + {file = "coverage-7.4.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b91cbc4b195444e7e258ba27ac33769c41b94967919f10037e6355e998af255c"}, + {file = "coverage-7.4.4-cp310-cp310-win32.whl", hash = "sha256:598825b51b81c808cb6f078dcb972f96af96b078faa47af7dfcdf282835baa8d"}, + {file = "coverage-7.4.4-cp310-cp310-win_amd64.whl", hash = "sha256:09ef9199ed6653989ebbcaacc9b62b514bb63ea2f90256e71fea3ed74bd8ff6f"}, + {file = "coverage-7.4.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0f9f50e7ef2a71e2fae92774c99170eb8304e3fdf9c8c3c7ae9bab3e7229c5cf"}, + {file = "coverage-7.4.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:623512f8ba53c422fcfb2ce68362c97945095b864cda94a92edbaf5994201083"}, + {file = "coverage-7.4.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0513b9508b93da4e1716744ef6ebc507aff016ba115ffe8ecff744d1322a7b63"}, + {file = "coverage-7.4.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:40209e141059b9370a2657c9b15607815359ab3ef9918f0196b6fccce8d3230f"}, + {file = "coverage-7.4.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a2b2b78c78293782fd3767d53e6474582f62443d0504b1554370bde86cc8227"}, + {file = "coverage-7.4.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:73bfb9c09951125d06ee473bed216e2c3742f530fc5acc1383883125de76d9cd"}, + {file = "coverage-7.4.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1f384c3cc76aeedce208643697fb3e8437604b512255de6d18dae3f27655a384"}, + {file = "coverage-7.4.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:54eb8d1bf7cacfbf2a3186019bcf01d11c666bd495ed18717162f7eb1e9dd00b"}, + {file = "coverage-7.4.4-cp311-cp311-win32.whl", hash = "sha256:cac99918c7bba15302a2d81f0312c08054a3359eaa1929c7e4b26ebe41e9b286"}, + {file = "coverage-7.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:b14706df8b2de49869ae03a5ccbc211f4041750cd4a66f698df89d44f4bd30ec"}, + {file = "coverage-7.4.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:201bef2eea65e0e9c56343115ba3814e896afe6d36ffd37bab783261db430f76"}, + {file = "coverage-7.4.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:41c9c5f3de16b903b610d09650e5e27adbfa7f500302718c9ffd1c12cf9d6818"}, + {file = "coverage-7.4.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d898fe162d26929b5960e4e138651f7427048e72c853607f2b200909794ed978"}, + {file = "coverage-7.4.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ea79bb50e805cd6ac058dfa3b5c8f6c040cb87fe83de10845857f5535d1db70"}, + {file = "coverage-7.4.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce4b94265ca988c3f8e479e741693d143026632672e3ff924f25fab50518dd51"}, + {file = "coverage-7.4.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = 
"sha256:00838a35b882694afda09f85e469c96367daa3f3f2b097d846a7216993d37f4c"}, + {file = "coverage-7.4.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:fdfafb32984684eb03c2d83e1e51f64f0906b11e64482df3c5db936ce3839d48"}, + {file = "coverage-7.4.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:69eb372f7e2ece89f14751fbcbe470295d73ed41ecd37ca36ed2eb47512a6ab9"}, + {file = "coverage-7.4.4-cp312-cp312-win32.whl", hash = "sha256:137eb07173141545e07403cca94ab625cc1cc6bc4c1e97b6e3846270e7e1fea0"}, + {file = "coverage-7.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:d71eec7d83298f1af3326ce0ff1d0ea83c7cb98f72b577097f9083b20bdaf05e"}, + {file = "coverage-7.4.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d5ae728ff3b5401cc320d792866987e7e7e880e6ebd24433b70a33b643bb0384"}, + {file = "coverage-7.4.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cc4f1358cb0c78edef3ed237ef2c86056206bb8d9140e73b6b89fbcfcbdd40e1"}, + {file = "coverage-7.4.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8130a2aa2acb8788e0b56938786c33c7c98562697bf9f4c7d6e8e5e3a0501e4a"}, + {file = "coverage-7.4.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf271892d13e43bc2b51e6908ec9a6a5094a4df1d8af0bfc360088ee6c684409"}, + {file = "coverage-7.4.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a4cdc86d54b5da0df6d3d3a2f0b710949286094c3a6700c21e9015932b81447e"}, + {file = "coverage-7.4.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ae71e7ddb7a413dd60052e90528f2f65270aad4b509563af6d03d53e979feafd"}, + {file = "coverage-7.4.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:38dd60d7bf242c4ed5b38e094baf6401faa114fc09e9e6632374388a404f98e7"}, + {file = "coverage-7.4.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aa5b1c1bfc28384f1f53b69a023d789f72b2e0ab1b3787aae16992a7ca21056c"}, + {file = "coverage-7.4.4-cp38-cp38-win32.whl", hash = 
"sha256:dfa8fe35a0bb90382837b238fff375de15f0dcdb9ae68ff85f7a63649c98527e"}, + {file = "coverage-7.4.4-cp38-cp38-win_amd64.whl", hash = "sha256:b2991665420a803495e0b90a79233c1433d6ed77ef282e8e152a324bbbc5e0c8"}, + {file = "coverage-7.4.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3b799445b9f7ee8bf299cfaed6f5b226c0037b74886a4e11515e569b36fe310d"}, + {file = "coverage-7.4.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b4d33f418f46362995f1e9d4f3a35a1b6322cb959c31d88ae56b0298e1c22357"}, + {file = "coverage-7.4.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aadacf9a2f407a4688d700e4ebab33a7e2e408f2ca04dbf4aef17585389eff3e"}, + {file = "coverage-7.4.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7c95949560050d04d46b919301826525597f07b33beba6187d04fa64d47ac82e"}, + {file = "coverage-7.4.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff7687ca3d7028d8a5f0ebae95a6e4827c5616b31a4ee1192bdfde697db110d4"}, + {file = "coverage-7.4.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5fc1de20b2d4a061b3df27ab9b7c7111e9a710f10dc2b84d33a4ab25065994ec"}, + {file = "coverage-7.4.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c74880fc64d4958159fbd537a091d2a585448a8f8508bf248d72112723974cbd"}, + {file = "coverage-7.4.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:742a76a12aa45b44d236815d282b03cfb1de3b4323f3e4ec933acfae08e54ade"}, + {file = "coverage-7.4.4-cp39-cp39-win32.whl", hash = "sha256:d89d7b2974cae412400e88f35d86af72208e1ede1a541954af5d944a8ba46c57"}, + {file = "coverage-7.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:9ca28a302acb19b6af89e90f33ee3e1906961f94b54ea37de6737b7ca9d8827c"}, + {file = "coverage-7.4.4-pp38.pp39.pp310-none-any.whl", hash = "sha256:b2c5edc4ac10a7ef6605a966c58929ec6c1bd0917fb8c15cb3363f65aa40e677"}, + {file = "coverage-7.4.4.tar.gz", hash = 
"sha256:c901df83d097649e257e803be22592aedfd5182f07b3cc87d640bbb9afd50f49"}, ] [package.extras] @@ -865,13 +861,13 @@ profile = ["gprof2dot (>=2022.7.29)"] [[package]] name = "django" -version = "3.2.24" +version = "3.2.25" description = "A high-level Python Web framework that encourages rapid development and clean, pragmatic design." optional = false python-versions = ">=3.6" files = [ - {file = "Django-3.2.24-py3-none-any.whl", hash = "sha256:5dd5b787c3ba39637610fe700f54bf158e33560ea0dba600c19921e7ff926ec5"}, - {file = "Django-3.2.24.tar.gz", hash = "sha256:aaee9fb0fb4ebd4311520887ad2e33313d368846607f82a9a0ed461cd4c35b18"}, + {file = "Django-3.2.25-py3-none-any.whl", hash = "sha256:a52ea7fcf280b16f7b739cec38fa6d3f8953a5456986944c3ca97e79882b4e38"}, + {file = "Django-3.2.25.tar.gz", hash = "sha256:7ca38a78654aee72378594d63e51636c04b8e28574f5505dff630895b5472777"}, ] [package.dependencies] @@ -1253,13 +1249,13 @@ sidecar = ["drf-spectacular-sidecar"] [[package]] name = "drf-spectacular-sidecar" -version = "2024.2.1" +version = "2024.3.4" description = "Serve self-contained distribution builds of Swagger UI and Redoc with Django" optional = false python-versions = ">=3.6" files = [ - {file = "drf-spectacular-sidecar-2024.2.1.tar.gz", hash = "sha256:db95a38971c9be09986356f82041fac60183d28ebdf60c0c51eb8c1f86da3937"}, - {file = "drf_spectacular_sidecar-2024.2.1-py3-none-any.whl", hash = "sha256:dc819ef7a35448c18b2bf4273b38fe1468e14daea5fc8675afb5d0f9e6d9a0ba"}, + {file = "drf-spectacular-sidecar-2024.3.4.tar.gz", hash = "sha256:101449802421606a2de8be0e27c52e5e7bae14a6d99e1a7ab27eddb659fb8676"}, + {file = "drf_spectacular_sidecar-2024.3.4-py3-none-any.whl", hash = "sha256:71db685ab4fae50f33261c86f5cfd1ae9b3cca72bc0426ed91868121d041be24"}, ] [package.dependencies] @@ -1487,13 +1483,13 @@ six = ">=1.12" [[package]] name = "griffe" -version = "0.41.0" +version = "0.42.1" description = "Signatures for entire Python programs. 
Extract the structure, the frame, the skeleton of your project, to generate API documentation or find breaking changes in your API." optional = false python-versions = ">=3.8" files = [ - {file = "griffe-0.41.0-py3-none-any.whl", hash = "sha256:8aa7fc6eb00cb80af9c0198178c6b7110cb59fa2c5187bb13ea25eebbe4dd928"}, - {file = "griffe-0.41.0.tar.gz", hash = "sha256:850128c3198c18713eaf0a6cc8572e590a16b1965f72a4e871e66cf84740903f"}, + {file = "griffe-0.42.1-py3-none-any.whl", hash = "sha256:7e805e35617601355edcac0d3511cedc1ed0cb1f7645e2d336ae4b05bbae7b3b"}, + {file = "griffe-0.42.1.tar.gz", hash = "sha256:57046131384043ed078692b85d86b76568a686266cc036b9b56b704466f803ce"}, ] [package.dependencies] @@ -1513,39 +1509,40 @@ files = [ [[package]] name = "httpcore" -version = "0.17.3" +version = "1.0.5" description = "A minimal low-level HTTP client." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "httpcore-0.17.3-py3-none-any.whl", hash = "sha256:c2789b767ddddfa2a5782e3199b2b7f6894540b17b16ec26b2c4d8e103510b87"}, - {file = "httpcore-0.17.3.tar.gz", hash = "sha256:a6f30213335e34c1ade7be6ec7c47f19f50c56db36abef1a9dfa3815b1cb3888"}, + {file = "httpcore-1.0.5-py3-none-any.whl", hash = "sha256:421f18bac248b25d310f3cacd198d55b8e6125c107797b609ff9b7a6ba7991b5"}, + {file = "httpcore-1.0.5.tar.gz", hash = "sha256:34a38e2f9291467ee3b44e89dd52615370e152954ba21721378a87b2960f7a61"}, ] [package.dependencies] -anyio = ">=3.0,<5.0" certifi = "*" h11 = ">=0.13,<0.15" -sniffio = "==1.*" [package.extras] +asyncio = ["anyio (>=4.0,<5.0)"] http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] +trio = ["trio (>=0.22.0,<0.26.0)"] [[package]] name = "httpx" -version = "0.24.1" +version = "0.27.0" description = "The next generation HTTP client." 
optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "httpx-0.24.1-py3-none-any.whl", hash = "sha256:06781eb9ac53cde990577af654bd990a4949de37a28bdb4a230d434f3a30b9bd"}, - {file = "httpx-0.24.1.tar.gz", hash = "sha256:5853a43053df830c20f8110c5e69fe44d035d850b2dfe795e196f00fdb774bdd"}, + {file = "httpx-0.27.0-py3-none-any.whl", hash = "sha256:71d5465162c13681bff01ad59b2cc68dd838ea1f10e51574bac27103f00c91a5"}, + {file = "httpx-0.27.0.tar.gz", hash = "sha256:a0cb88a46f32dc874e04ee956e4c2764aba2aa228f650b06788ba6bda2962ab5"}, ] [package.dependencies] +anyio = "*" certifi = "*" -httpcore = ">=0.15.0,<0.18.0" +httpcore = "==1.*" idna = "*" sniffio = "*" @@ -1587,21 +1584,21 @@ testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packag [[package]] name = "importlib-resources" -version = "5.13.0" +version = "6.4.0" description = "Read resources from Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "importlib_resources-5.13.0-py3-none-any.whl", hash = "sha256:9f7bd0c97b79972a6cce36a366356d16d5e13b09679c11a58f1014bfdf8e64b2"}, - {file = "importlib_resources-5.13.0.tar.gz", hash = "sha256:82d5c6cca930697dbbd86c93333bb2c2e72861d4789a11c2662b933e5ad2b528"}, + {file = "importlib_resources-6.4.0-py3-none-any.whl", hash = "sha256:50d10f043df931902d4194ea07ec57960f66a80449ff867bfe782b4c486ba78c"}, + {file = "importlib_resources-6.4.0.tar.gz", hash = "sha256:cdb2b453b8046ca4e3798eb1d84f3cce1446a0e8e7b5ef4efb600f19fc398145"}, ] [package.dependencies] zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-ruff"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", 
"rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["jaraco.test (>=5.4)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)", "zipp (>=3.17)"] [[package]] name = "incremental" @@ -1819,13 +1816,13 @@ yamlordereddictloader = "*" [[package]] name = "kombu" -version = "5.3.5" +version = "5.3.6" description = "Messaging library for Python." optional = false python-versions = ">=3.8" files = [ - {file = "kombu-5.3.5-py3-none-any.whl", hash = "sha256:0eac1bbb464afe6fb0924b21bf79460416d25d8abc52546d4f16cad94f789488"}, - {file = "kombu-5.3.5.tar.gz", hash = "sha256:30e470f1a6b49c70dc6f6d13c3e4cc4e178aa6c469ceb6bcd55645385fc84b93"}, + {file = "kombu-5.3.6-py3-none-any.whl", hash = "sha256:49f1e62b12369045de2662f62cc584e7df83481a513db83b01f87b5b9785e378"}, + {file = "kombu-5.3.6.tar.gz", hash = "sha256:f3da5b570a147a5da8280180aa80b03807283d63ea5081fcdb510d18242431d9"}, ] [package.dependencies] @@ -1844,59 +1841,13 @@ mongodb = ["pymongo (>=4.1.1)"] msgpack = ["msgpack"] pyro = ["pyro4"] qpid = ["qpid-python (>=0.26)", "qpid-tools (>=0.26)"] -redis = ["redis (>=4.5.2,!=4.5.5,<6.0.0)"] +redis = ["redis (>=4.5.2,!=4.5.5,!=5.0.2)"] slmq = ["softlayer-messaging (>=1.0.3)"] sqlalchemy = ["sqlalchemy (>=1.4.48,<2.1)"] sqs = ["boto3 (>=1.26.143)", "pycurl (>=7.43.0.5)", "urllib3 (>=1.26.16)"] yaml = ["PyYAML (>=3.10)"] zookeeper = ["kazoo (>=2.8.0)"] -[[package]] -name = "lazy-object-proxy" -version = "1.10.0" -description = "A fast and thorough lazy object proxy." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "lazy-object-proxy-1.10.0.tar.gz", hash = "sha256:78247b6d45f43a52ef35c25b5581459e85117225408a4128a3daf8bf9648ac69"}, - {file = "lazy_object_proxy-1.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:855e068b0358ab916454464a884779c7ffa312b8925c6f7401e952dcf3b89977"}, - {file = "lazy_object_proxy-1.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab7004cf2e59f7c2e4345604a3e6ea0d92ac44e1c2375527d56492014e690c3"}, - {file = "lazy_object_proxy-1.10.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc0d2fc424e54c70c4bc06787e4072c4f3b1aa2f897dfdc34ce1013cf3ceef05"}, - {file = "lazy_object_proxy-1.10.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e2adb09778797da09d2b5ebdbceebf7dd32e2c96f79da9052b2e87b6ea495895"}, - {file = "lazy_object_proxy-1.10.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b1f711e2c6dcd4edd372cf5dec5c5a30d23bba06ee012093267b3376c079ec83"}, - {file = "lazy_object_proxy-1.10.0-cp310-cp310-win32.whl", hash = "sha256:76a095cfe6045c7d0ca77db9934e8f7b71b14645f0094ffcd842349ada5c5fb9"}, - {file = "lazy_object_proxy-1.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:b4f87d4ed9064b2628da63830986c3d2dca7501e6018347798313fcf028e2fd4"}, - {file = "lazy_object_proxy-1.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fec03caabbc6b59ea4a638bee5fce7117be8e99a4103d9d5ad77f15d6f81020c"}, - {file = "lazy_object_proxy-1.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:02c83f957782cbbe8136bee26416686a6ae998c7b6191711a04da776dc9e47d4"}, - {file = "lazy_object_proxy-1.10.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:009e6bb1f1935a62889ddc8541514b6a9e1fcf302667dcb049a0be5c8f613e56"}, - {file = "lazy_object_proxy-1.10.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:75fc59fc450050b1b3c203c35020bc41bd2695ed692a392924c6ce180c6f1dc9"}, - {file = "lazy_object_proxy-1.10.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:782e2c9b2aab1708ffb07d4bf377d12901d7a1d99e5e410d648d892f8967ab1f"}, - {file = "lazy_object_proxy-1.10.0-cp311-cp311-win32.whl", hash = "sha256:edb45bb8278574710e68a6b021599a10ce730d156e5b254941754a9cc0b17d03"}, - {file = "lazy_object_proxy-1.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:e271058822765ad5e3bca7f05f2ace0de58a3f4e62045a8c90a0dfd2f8ad8cc6"}, - {file = "lazy_object_proxy-1.10.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e98c8af98d5707dcdecc9ab0863c0ea6e88545d42ca7c3feffb6b4d1e370c7ba"}, - {file = "lazy_object_proxy-1.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:952c81d415b9b80ea261d2372d2a4a2332a3890c2b83e0535f263ddfe43f0d43"}, - {file = "lazy_object_proxy-1.10.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80b39d3a151309efc8cc48675918891b865bdf742a8616a337cb0090791a0de9"}, - {file = "lazy_object_proxy-1.10.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e221060b701e2aa2ea991542900dd13907a5c90fa80e199dbf5a03359019e7a3"}, - {file = "lazy_object_proxy-1.10.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:92f09ff65ecff3108e56526f9e2481b8116c0b9e1425325e13245abfd79bdb1b"}, - {file = "lazy_object_proxy-1.10.0-cp312-cp312-win32.whl", hash = "sha256:3ad54b9ddbe20ae9f7c1b29e52f123120772b06dbb18ec6be9101369d63a4074"}, - {file = "lazy_object_proxy-1.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:127a789c75151db6af398b8972178afe6bda7d6f68730c057fbbc2e96b08d282"}, - {file = "lazy_object_proxy-1.10.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9e4ed0518a14dd26092614412936920ad081a424bdcb54cc13349a8e2c6d106a"}, - {file = "lazy_object_proxy-1.10.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:5ad9e6ed739285919aa9661a5bbed0aaf410aa60231373c5579c6b4801bd883c"}, - {file = "lazy_object_proxy-1.10.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fc0a92c02fa1ca1e84fc60fa258458e5bf89d90a1ddaeb8ed9cc3147f417255"}, - {file = "lazy_object_proxy-1.10.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0aefc7591920bbd360d57ea03c995cebc204b424524a5bd78406f6e1b8b2a5d8"}, - {file = "lazy_object_proxy-1.10.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5faf03a7d8942bb4476e3b62fd0f4cf94eaf4618e304a19865abf89a35c0bbee"}, - {file = "lazy_object_proxy-1.10.0-cp38-cp38-win32.whl", hash = "sha256:e333e2324307a7b5d86adfa835bb500ee70bfcd1447384a822e96495796b0ca4"}, - {file = "lazy_object_proxy-1.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:cb73507defd385b7705c599a94474b1d5222a508e502553ef94114a143ec6696"}, - {file = "lazy_object_proxy-1.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:366c32fe5355ef5fc8a232c5436f4cc66e9d3e8967c01fb2e6302fd6627e3d94"}, - {file = "lazy_object_proxy-1.10.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2297f08f08a2bb0d32a4265e98a006643cd7233fb7983032bd61ac7a02956b3b"}, - {file = "lazy_object_proxy-1.10.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18dd842b49456aaa9a7cf535b04ca4571a302ff72ed8740d06b5adcd41fe0757"}, - {file = "lazy_object_proxy-1.10.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:217138197c170a2a74ca0e05bddcd5f1796c735c37d0eee33e43259b192aa424"}, - {file = "lazy_object_proxy-1.10.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9a3a87cf1e133e5b1994144c12ca4aa3d9698517fe1e2ca82977781b16955658"}, - {file = "lazy_object_proxy-1.10.0-cp39-cp39-win32.whl", hash = "sha256:30b339b2a743c5288405aa79a69e706a06e02958eab31859f7f3c04980853b70"}, - {file = "lazy_object_proxy-1.10.0-cp39-cp39-win_amd64.whl", hash = 
"sha256:a899b10e17743683b293a729d3a11f2f399e8a90c73b089e29f5d0fe3509f0dd"}, - {file = "lazy_object_proxy-1.10.0-pp310.pp311.pp312.pp38.pp39-none-any.whl", hash = "sha256:80fa48bd89c8f2f456fc0765c11c23bf5af827febacd2f523ca5bc1893fcc09d"}, -] - [[package]] name = "lxml" version = "5.1.0" @@ -1992,19 +1943,20 @@ source = ["Cython (>=3.0.7)"] [[package]] name = "markdown" -version = "3.3.7" -description = "Python implementation of Markdown." +version = "3.5.2" +description = "Python implementation of John Gruber's Markdown." optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "Markdown-3.3.7-py3-none-any.whl", hash = "sha256:f5da449a6e1c989a4cea2631aa8ee67caa5a2ef855d551c88f9e309f4634c621"}, - {file = "Markdown-3.3.7.tar.gz", hash = "sha256:cbb516f16218e643d8e0a95b309f77eb118cb138d39a4f27851e6a63581db874"}, + {file = "Markdown-3.5.2-py3-none-any.whl", hash = "sha256:d43323865d89fc0cb9b20c75fc8ad313af307cc087e84b657d9eec768eddeadd"}, + {file = "Markdown-3.5.2.tar.gz", hash = "sha256:e1ac7b3dc550ee80e602e71c1d168002f062e49f1b11e26a36264dafd4df2ef8"}, ] [package.dependencies] importlib-metadata = {version = ">=4.4", markers = "python_version < \"3.10\""} [package.extras] +docs = ["mdx-gh-links (>=0.2)", "mkdocs (>=1.5)", "mkdocs-gen-files", "mkdocs-literate-nav", "mkdocs-nature (>=0.6)", "mkdocs-section-index", "mkdocstrings[python]"] testing = ["coverage", "pyyaml"] [[package]] @@ -2180,17 +2132,18 @@ min-versions = ["babel (==2.9.0)", "click (==7.0)", "colorama (==0.4)", "ghp-imp [[package]] name = "mkdocs-autorefs" -version = "0.5.0" +version = "1.0.1" description = "Automatically link across pages in MkDocs." 
optional = false python-versions = ">=3.8" files = [ - {file = "mkdocs_autorefs-0.5.0-py3-none-any.whl", hash = "sha256:7930fcb8ac1249f10e683967aeaddc0af49d90702af111a5e390e8b20b3d97ff"}, - {file = "mkdocs_autorefs-0.5.0.tar.gz", hash = "sha256:9a5054a94c08d28855cfab967ada10ed5be76e2bfad642302a610b252c3274c0"}, + {file = "mkdocs_autorefs-1.0.1-py3-none-any.whl", hash = "sha256:aacdfae1ab197780fb7a2dac92ad8a3d8f7ca8049a9cbe56a4218cd52e8da570"}, + {file = "mkdocs_autorefs-1.0.1.tar.gz", hash = "sha256:f684edf847eced40b570b57846b15f0bf57fb93ac2c510450775dcf16accb971"}, ] [package.dependencies] Markdown = ">=3.3" +markupsafe = ">=2.0.1" mkdocs = ">=1.1" [[package]] @@ -2323,13 +2276,13 @@ typing-extensions = ">=4.3.0" [[package]] name = "nautobot" -version = "2.1.5" +version = "2.2.0b1" description = "Source of truth and network automation platform." optional = false -python-versions = ">=3.8,<3.12" +python-versions = "<3.12,>=3.8" files = [ - {file = "nautobot-2.1.5-py3-none-any.whl", hash = "sha256:62ab0115349f7c75c675f93324644e519d5fde5dd0c7ed3baa40a1c20d45118a"}, - {file = "nautobot-2.1.5.tar.gz", hash = "sha256:4c36d03592210c17bc899dd9cfcb05c26ad720727e6869d928540611aefa2f55"}, + {file = "nautobot-2.2.0b1-py3-none-any.whl", hash = "sha256:cf91cf2dbc880a2491b45e943d6fae6b546dbbb2836c311e029131d177363220"}, + {file = "nautobot-2.2.0b1.tar.gz", hash = "sha256:ba5eca68e882af7e364d8fd3194e83d500f4bf668c5bde216c86ec5a4d11bd82"}, ] [package.dependencies] @@ -2362,7 +2315,7 @@ graphene-django = ">=2.16.0,<2.17.0" graphene-django-optimizer = ">=0.8.0,<0.9.0" Jinja2 = ">=3.1.3,<3.2.0" jsonschema = ">=4.7.0,<4.19.0" -Markdown = ">=3.3.7,<3.4.0" +Markdown = ">=3.3.7,<3.6.0" MarkupSafe = ">=2.1.5,<2.2.0" netaddr = ">=0.8.0,<0.9.0" netutils = ">=1.6.0,<2.0.0" @@ -2481,13 +2434,13 @@ textfsm = ">=1.1.3" [[package]] name = "netutils" -version = "1.6.0" +version = "1.7.0" description = "Common helper functions useful in network automation." 
optional = false python-versions = ">=3.8,<4.0" files = [ - {file = "netutils-1.6.0-py3-none-any.whl", hash = "sha256:e755e6141d0968f1deeb61693a4023f4f5fe1f0dde25d94ac1008f8191d8d237"}, - {file = "netutils-1.6.0.tar.gz", hash = "sha256:bd2fa691e172fe9d5c9e6fc5e2593316eb7fd2c36450454894ed13b274763d70"}, + {file = "netutils-1.7.0-py3-none-any.whl", hash = "sha256:ad2e65d2e5bb7cf857faeee96f03b8823782c509cb003f2e4e86cccf5b0a3328"}, + {file = "netutils-1.7.0.tar.gz", hash = "sha256:e0f461092e02c03166a6830706377dfe079b661ad9e41940f265424121621dc8"}, ] [package.extras] @@ -2495,27 +2448,27 @@ optionals = ["jsonschema (>=4.17.3,<5.0.0)", "napalm (>=4.0.0,<5.0.0)"] [[package]] name = "nh3" -version = "0.2.15" +version = "0.2.17" description = "Python bindings to the ammonia HTML sanitization library." optional = false python-versions = "*" files = [ - {file = "nh3-0.2.15-cp37-abi3-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:9c0d415f6b7f2338f93035bba5c0d8c1b464e538bfbb1d598acd47d7969284f0"}, - {file = "nh3-0.2.15-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:6f42f99f0cf6312e470b6c09e04da31f9abaadcd3eb591d7d1a88ea931dca7f3"}, - {file = "nh3-0.2.15-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac19c0d68cd42ecd7ead91a3a032fdfff23d29302dbb1311e641a130dfefba97"}, - {file = "nh3-0.2.15-cp37-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f0d77272ce6d34db6c87b4f894f037d55183d9518f948bba236fe81e2bb4e28"}, - {file = "nh3-0.2.15-cp37-abi3-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:8d595df02413aa38586c24811237e95937ef18304e108b7e92c890a06793e3bf"}, - {file = "nh3-0.2.15-cp37-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86e447a63ca0b16318deb62498db4f76fc60699ce0a1231262880b38b6cff911"}, - {file = "nh3-0.2.15-cp37-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3277481293b868b2715907310c7be0f1b9d10491d5adf9fce11756a97e97eddf"}, - 
{file = "nh3-0.2.15-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:60684857cfa8fdbb74daa867e5cad3f0c9789415aba660614fe16cd66cbb9ec7"}, - {file = "nh3-0.2.15-cp37-abi3-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3b803a5875e7234907f7d64777dfde2b93db992376f3d6d7af7f3bc347deb305"}, - {file = "nh3-0.2.15-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:0d02d0ff79dfd8208ed25a39c12cbda092388fff7f1662466e27d97ad011b770"}, - {file = "nh3-0.2.15-cp37-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:f3b53ba93bb7725acab1e030bc2ecd012a817040fd7851b332f86e2f9bb98dc6"}, - {file = "nh3-0.2.15-cp37-abi3-musllinux_1_2_i686.whl", hash = "sha256:b1e97221cedaf15a54f5243f2c5894bb12ca951ae4ddfd02a9d4ea9df9e1a29d"}, - {file = "nh3-0.2.15-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a5167a6403d19c515217b6bcaaa9be420974a6ac30e0da9e84d4fc67a5d474c5"}, - {file = "nh3-0.2.15-cp37-abi3-win32.whl", hash = "sha256:427fecbb1031db085eaac9931362adf4a796428ef0163070c484b5a768e71601"}, - {file = "nh3-0.2.15-cp37-abi3-win_amd64.whl", hash = "sha256:bc2d086fb540d0fa52ce35afaded4ea526b8fc4d3339f783db55c95de40ef02e"}, - {file = "nh3-0.2.15.tar.gz", hash = "sha256:d1e30ff2d8d58fb2a14961f7aac1bbb1c51f9bdd7da727be35c63826060b0bf3"}, + {file = "nh3-0.2.17-cp37-abi3-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:551672fd71d06cd828e282abdb810d1be24e1abb7ae2543a8fa36a71c1006fe9"}, + {file = "nh3-0.2.17-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:c551eb2a3876e8ff2ac63dff1585236ed5dfec5ffd82216a7a174f7c5082a78a"}, + {file = "nh3-0.2.17-cp37-abi3-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:66f17d78826096291bd264f260213d2b3905e3c7fae6dfc5337d49429f1dc9f3"}, + {file = "nh3-0.2.17-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0316c25b76289cf23be6b66c77d3608a4fdf537b35426280032f432f14291b9a"}, + {file = "nh3-0.2.17-cp37-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:22c26e20acbb253a5bdd33d432a326d18508a910e4dcf9a3316179860d53345a"}, + {file = "nh3-0.2.17-cp37-abi3-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:85cdbcca8ef10733bd31f931956f7fbb85145a4d11ab9e6742bbf44d88b7e351"}, + {file = "nh3-0.2.17-cp37-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:40015514022af31975c0b3bca4014634fa13cb5dc4dbcbc00570acc781316dcc"}, + {file = "nh3-0.2.17-cp37-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ba73a2f8d3a1b966e9cdba7b211779ad8a2561d2dba9674b8a19ed817923f65f"}, + {file = "nh3-0.2.17-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c21bac1a7245cbd88c0b0e4a420221b7bfa838a2814ee5bb924e9c2f10a1120b"}, + {file = "nh3-0.2.17-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:d7a25fd8c86657f5d9d576268e3b3767c5cd4f42867c9383618be8517f0f022a"}, + {file = "nh3-0.2.17-cp37-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:c790769152308421283679a142dbdb3d1c46c79c823008ecea8e8141db1a2062"}, + {file = "nh3-0.2.17-cp37-abi3-musllinux_1_2_i686.whl", hash = "sha256:b4427ef0d2dfdec10b641ed0bdaf17957eb625b2ec0ea9329b3d28806c153d71"}, + {file = "nh3-0.2.17-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a3f55fabe29164ba6026b5ad5c3151c314d136fd67415a17660b4aaddacf1b10"}, + {file = "nh3-0.2.17-cp37-abi3-win32.whl", hash = "sha256:1a814dd7bba1cb0aba5bcb9bebcc88fd801b63e21e2450ae6c52d3b3336bc911"}, + {file = "nh3-0.2.17-cp37-abi3-win_amd64.whl", hash = "sha256:1aa52a7def528297f256de0844e8dd680ee279e79583c76d6fa73a978186ddfb"}, + {file = "nh3-0.2.17.tar.gz", hash = "sha256:40d0741a19c3d645e54efba71cb0d8c475b59135c1e3c580f879ad5514cbf028"}, ] [[package]] @@ -2566,17 +2519,17 @@ nornir = ">=3,<4" [[package]] name = "nornir-nautobot" -version = "3.1.1" +version = "3.1.2" description = "Nornir Nautobot" optional = false python-versions = ">=3.8,<4.0" files = [ - {file = "nornir_nautobot-3.1.1-py3-none-any.whl", hash = 
"sha256:2a21d134ddcedcf2344e5e0d825fb9ab4f32d913294679fe9cfe1eeb19272256"}, - {file = "nornir_nautobot-3.1.1.tar.gz", hash = "sha256:c36ff2d8626131b91d0bdb24967782d39db58e2be3b011b8be5d30e741562556"}, + {file = "nornir_nautobot-3.1.2-py3-none-any.whl", hash = "sha256:b93597c507371674ca83b36509d043419ca264572d01151746c52c37bf1421f6"}, + {file = "nornir_nautobot-3.1.2.tar.gz", hash = "sha256:69fcf4da83b53b876267f18451b9bb7071218519bfdb8969fb1744825a029901"}, ] [package.dependencies] -httpx = ">=0.24.1,<0.25.0" +httpx = ">=0.23.0,<=0.27.0" netutils = ">=1.6.0,<2.0.0" nornir = ">=3.0.0,<4.0.0" nornir-jinja2 = ">=0.2.0,<0.3.0" @@ -2620,13 +2573,13 @@ nornir = ">=3,<4" [[package]] name = "ntc-templates" -version = "4.3.0" +version = "4.4.0" description = "TextFSM Templates for Network Devices, and Python wrapper for TextFSM's CliTable." optional = false python-versions = ">=3.8,<4.0" files = [ - {file = "ntc_templates-4.3.0-py3-none-any.whl", hash = "sha256:f9b4805dfd9d1516a29ae9f505409c17c7f14c958d47f1c1f57c9486af6164db"}, - {file = "ntc_templates-4.3.0.tar.gz", hash = "sha256:b6902389e86b868d76b64ea55c8225a0aa7aafe910b3a02b2a33b7b18fb27ef1"}, + {file = "ntc_templates-4.4.0-py3-none-any.whl", hash = "sha256:e36274b8ba60e2e2e96f2948e23f6929c79ac0167f87994d4ac4d76e4a8be4ea"}, + {file = "ntc_templates-4.4.0.tar.gz", hash = "sha256:2f630571f510a8c6573c8b2a1f584494b10505577e446b9ee471b5a1c542620d"}, ] [package.dependencies] @@ -3153,23 +3106,23 @@ tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] [[package]] name = "pylint" -version = "2.17.7" +version = "3.1.0" description = "python code static checker" optional = false -python-versions = ">=3.7.2" +python-versions = ">=3.8.0" files = [ - {file = "pylint-2.17.7-py3-none-any.whl", hash = "sha256:27a8d4c7ddc8c2f8c18aa0050148f89ffc09838142193fdbe98f172781a3ff87"}, - {file = "pylint-2.17.7.tar.gz", hash = "sha256:f4fcac7ae74cfe36bc8451e931d8438e4a476c20314b1101c458ad0f05191fad"}, + {file = 
"pylint-3.1.0-py3-none-any.whl", hash = "sha256:507a5b60953874766d8a366e8e8c7af63e058b26345cfcb5f91f89d987fd6b74"}, + {file = "pylint-3.1.0.tar.gz", hash = "sha256:6a69beb4a6f63debebaab0a3477ecd0f559aa726af4954fc948c51f7a2549e23"}, ] [package.dependencies] -astroid = ">=2.15.8,<=2.17.0-dev0" +astroid = ">=3.1.0,<=3.2.0-dev0" colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} dill = [ {version = ">=0.2", markers = "python_version < \"3.11\""}, {version = ">=0.3.6", markers = "python_version >= \"3.11\""}, ] -isort = ">=4.2.5,<6" +isort = ">=4.2.5,<5.13.0 || >5.13.0,<6" mccabe = ">=0.6,<0.8" platformdirs = ">=2.2.0" tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} @@ -3200,20 +3153,20 @@ with-django = ["Django (>=2.2)"] [[package]] name = "pylint-nautobot" -version = "0.2.1" +version = "0.3.0" description = "Custom Pylint Rules for Nautobot" optional = false -python-versions = ">=3.7,<4.0" +python-versions = ">=3.8,<3.12" files = [ - {file = "pylint_nautobot-0.2.1-py3-none-any.whl", hash = "sha256:6656cd571d6e997e6d7e37631308f1de25949a596a8309ab6d47a2e387c892c6"}, - {file = "pylint_nautobot-0.2.1.tar.gz", hash = "sha256:2872106a29236b0e31293efe4a2d02a66527c67f33437f3e2345251c4cf71b4d"}, + {file = "pylint_nautobot-0.3.0-py3-none-any.whl", hash = "sha256:91fed48d9a9f565c6aa46c679b930d64b06d014061f6e7e802e6de8b6b8e3f87"}, + {file = "pylint_nautobot-0.3.0.tar.gz", hash = "sha256:387a1d73b49186a7b325b6c1a3634e2c57ec0f2350e93494304c47073400099b"}, ] [package.dependencies] -importlib-resources = ">=5.12.0,<6.0.0" -pylint = ">=2.13,<3.0" -pyyaml = ">=6.0,<7.0" -tomli = ">=2.0.1,<3.0.0" +importlib-resources = ">=5.12.0" +pylint = ">=2.17.5" +pyyaml = ">=6.0.1" +toml = ">=0.10.2" [[package]] name = "pylint-plugin-utils" @@ -3231,17 +3184,17 @@ pylint = ">=1.7" [[package]] name = "pymdown-extensions" -version = "10.4" +version = "10.7.1" description = "Extension pack for Python Markdown." 
optional = false python-versions = ">=3.8" files = [ - {file = "pymdown_extensions-10.4-py3-none-any.whl", hash = "sha256:cfc28d6a09d19448bcbf8eee3ce098c7d17ff99f7bd3069db4819af181212037"}, - {file = "pymdown_extensions-10.4.tar.gz", hash = "sha256:bc46f11749ecd4d6b71cf62396104b4a200bad3498cb0f5dad1b8502fe461a35"}, + {file = "pymdown_extensions-10.7.1-py3-none-any.whl", hash = "sha256:f5cc7000d7ff0d1ce9395d216017fa4df3dde800afb1fb72d1c7d3fd35e710f4"}, + {file = "pymdown_extensions-10.7.1.tar.gz", hash = "sha256:c70e146bdd83c744ffc766b4671999796aba18842b268510a329f7f64700d584"}, ] [package.dependencies] -markdown = ">=3.2" +markdown = ">=3.5" pyyaml = "*" [package.extras] @@ -3275,13 +3228,13 @@ tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"] [[package]] name = "pynautobot" -version = "2.0.2" +version = "2.1.1" description = "Nautobot API client library" optional = false -python-versions = ">=3.8,<4.0" +python-versions = "<4.0,>=3.8" files = [ - {file = "pynautobot-2.0.2-py3-none-any.whl", hash = "sha256:c0533bcd5ab548d23273f6be49071f09a3dec7cd65ded3507be1707d25bb5f0e"}, - {file = "pynautobot-2.0.2.tar.gz", hash = "sha256:a62f7b35d4f3492a3cfb038abfc3272567dd1d4b88703ab2736db47f40263932"}, + {file = "pynautobot-2.1.1-py3-none-any.whl", hash = "sha256:bcf56fee4733942a87dd07f956418f67580f45d08e5296c8fa3d11316c4ca419"}, + {file = "pynautobot-2.1.1.tar.gz", hash = "sha256:f01907a519689dc842f909f850737f68b53953818c97380a8101406d37e49d1b"}, ] [package.dependencies] @@ -3291,13 +3244,13 @@ urllib3 = ">=1.21.1,<1.27" [[package]] name = "pyparsing" -version = "3.1.1" +version = "3.1.2" description = "pyparsing module - Classes and methods to define and execute parsing grammars" optional = false python-versions = ">=3.6.8" files = [ - {file = "pyparsing-3.1.1-py3-none-any.whl", hash = "sha256:32c7c0b711493c72ff18a981d24f28aaf9c1fb7ed5e9667c9e84e3db623bdbfb"}, - {file = "pyparsing-3.1.1.tar.gz", hash = 
"sha256:ede28a1a32462f5a9705e07aea48001a08f7cf81a021585011deba701581a0db"}, + {file = "pyparsing-3.1.2-py3-none-any.whl", hash = "sha256:f9db75911801ed778fe61bb643079ff86601aca99fcae6345aa67292038fb742"}, + {file = "pyparsing-3.1.2.tar.gz", hash = "sha256:a1bac0ce561155ecc3ed78ca94d3c9378656ad4c94c1270de543f621420f94ad"}, ] [package.extras] @@ -3337,13 +3290,13 @@ cron-schedule = ["croniter"] [[package]] name = "python-dateutil" -version = "2.8.2" +version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, ] [package.dependencies] @@ -3472,6 +3425,7 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, {file = 
"PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, @@ -3522,17 +3476,17 @@ pyyaml = "*" [[package]] name = "redis" -version = "5.0.1" +version = "5.0.3" description = "Python client for Redis database and key-value store" optional = false python-versions = ">=3.7" files = [ - {file = "redis-5.0.1-py3-none-any.whl", hash = "sha256:ed4802971884ae19d640775ba3b03aa2e7bd5e8fb8dfaed2decce4d0fc48391f"}, - {file = "redis-5.0.1.tar.gz", hash = "sha256:0dab495cd5753069d3bc650a0dde8a8f9edde16fc5691b689a566eda58100d0f"}, + {file = "redis-5.0.3-py3-none-any.whl", hash = "sha256:5da9b8fe9e1254293756c16c008e8620b3d15fcc6dde6babde9541850e72a32d"}, + {file = "redis-5.0.3.tar.gz", hash = "sha256:4973bae7444c0fbed64a06b87446f79361cb7e4ec1538c022d696ed7a5015580"}, ] [package.dependencies] -async-timeout = {version = ">=4.0.2", markers = "python_full_version <= \"3.11.2\""} +async-timeout = {version = ">=4.0.3", markers = "python_full_version < \"3.11.3\""} [package.extras] hiredis = ["hiredis (>=1.0.0)"] @@ -3540,13 +3494,13 @@ ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==20.0.1)", "requests (>=2.26.0)" [[package]] name = "referencing" -version = "0.33.0" +version = "0.34.0" description = "JSON Referencing + Python" optional = false python-versions = ">=3.8" files = [ - {file = "referencing-0.33.0-py3-none-any.whl", hash = "sha256:39240f2ecc770258f28b642dd47fd74bc8b02484de54e1882b74b35ebd779bd5"}, - {file = "referencing-0.33.0.tar.gz", hash = "sha256:c775fedf74bc0f9189c2a3be1c12fd03e8c23f4d371dce795df44e06c5b412f7"}, + {file = "referencing-0.34.0-py3-none-any.whl", hash = "sha256:d53ae300ceddd3169f1ffa9caf2cb7b769e92657e4fafb23d34b93679116dfd4"}, + {file = "referencing-0.34.0.tar.gz", hash = "sha256:5773bd84ef41799a5a8ca72dc34590c041eb01bf9aa02632b4a973fb0181a844"}, ] 
[package.dependencies] @@ -3678,13 +3632,13 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "requests-oauthlib" -version = "1.3.1" +version = "2.0.0" description = "OAuthlib authentication support for Requests." optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=3.4" files = [ - {file = "requests-oauthlib-1.3.1.tar.gz", hash = "sha256:75beac4a47881eeb94d5ea5d6ad31ef88856affe2332b9aafb52c6452ccf0d7a"}, - {file = "requests_oauthlib-1.3.1-py2.py3-none-any.whl", hash = "sha256:2577c501a2fb8d05a304c09d090d6e47c306fef15809d102b327cf8364bddab5"}, + {file = "requests-oauthlib-2.0.0.tar.gz", hash = "sha256:b3dffaebd884d8cd778494369603a9e7b58d29111bf6b41bdc2dcd87203af4e9"}, + {file = "requests_oauthlib-2.0.0-py2.py3-none-any.whl", hash = "sha256:7dd8a5c40426b779b0868c404bdef9768deccf22749cde15852df527e6269b36"}, ] [package.dependencies] @@ -3696,13 +3650,13 @@ rsa = ["oauthlib[signedtoken] (>=3.0.0)"] [[package]] name = "rich" -version = "13.7.0" +version = "13.7.1" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" optional = false python-versions = ">=3.7.0" files = [ - {file = "rich-13.7.0-py3-none-any.whl", hash = "sha256:6da14c108c4866ee9520bbffa71f6fe3962e193b7da68720583850cd4548e235"}, - {file = "rich-13.7.0.tar.gz", hash = "sha256:5cb5123b5cf9ee70584244246816e9114227e0b98ad9176eede6ad54bf5403fa"}, + {file = "rich-13.7.1-py3-none-any.whl", hash = "sha256:4edbae314f59eb482f54e9e30bf00d33350aaa94f4bfcd4e9e3110e64d0d7222"}, + {file = "rich-13.7.1.tar.gz", hash = "sha256:9be308cb1fe2f1f57d67ce99e95af38a1e2bc71ad9813b0e247cf7ffbcc3a432"}, ] [package.dependencies] @@ -3900,28 +3854,28 @@ files = [ [[package]] name = "ruff" -version = "0.2.2" +version = "0.3.4" description = "An extremely fast Python linter and code formatter, written in Rust." 
optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.2.2-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:0a9efb032855ffb3c21f6405751d5e147b0c6b631e3ca3f6b20f917572b97eb6"}, - {file = "ruff-0.2.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:d450b7fbff85913f866a5384d8912710936e2b96da74541c82c1b458472ddb39"}, - {file = "ruff-0.2.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ecd46e3106850a5c26aee114e562c329f9a1fbe9e4821b008c4404f64ff9ce73"}, - {file = "ruff-0.2.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5e22676a5b875bd72acd3d11d5fa9075d3a5f53b877fe7b4793e4673499318ba"}, - {file = "ruff-0.2.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1695700d1e25a99d28f7a1636d85bafcc5030bba9d0578c0781ba1790dbcf51c"}, - {file = "ruff-0.2.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:b0c232af3d0bd8f521806223723456ffebf8e323bd1e4e82b0befb20ba18388e"}, - {file = "ruff-0.2.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f63d96494eeec2fc70d909393bcd76c69f35334cdbd9e20d089fb3f0640216ca"}, - {file = "ruff-0.2.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6a61ea0ff048e06de273b2e45bd72629f470f5da8f71daf09fe481278b175001"}, - {file = "ruff-0.2.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e1439c8f407e4f356470e54cdecdca1bd5439a0673792dbe34a2b0a551a2fe3"}, - {file = "ruff-0.2.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:940de32dc8853eba0f67f7198b3e79bc6ba95c2edbfdfac2144c8235114d6726"}, - {file = "ruff-0.2.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:0c126da55c38dd917621552ab430213bdb3273bb10ddb67bc4b761989210eb6e"}, - {file = "ruff-0.2.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:3b65494f7e4bed2e74110dac1f0d17dc8e1f42faaa784e7c58a98e335ec83d7e"}, - {file = "ruff-0.2.2-py3-none-musllinux_1_2_x86_64.whl", hash = 
"sha256:1ec49be4fe6ddac0503833f3ed8930528e26d1e60ad35c2446da372d16651ce9"}, - {file = "ruff-0.2.2-py3-none-win32.whl", hash = "sha256:d920499b576f6c68295bc04e7b17b6544d9d05f196bb3aac4358792ef6f34325"}, - {file = "ruff-0.2.2-py3-none-win_amd64.whl", hash = "sha256:cc9a91ae137d687f43a44c900e5d95e9617cb37d4c989e462980ba27039d239d"}, - {file = "ruff-0.2.2-py3-none-win_arm64.whl", hash = "sha256:c9d15fc41e6054bfc7200478720570078f0b41c9ae4f010bcc16bd6f4d1aacdd"}, - {file = "ruff-0.2.2.tar.gz", hash = "sha256:e62ed7f36b3068a30ba39193a14274cd706bc486fad521276458022f7bccb31d"}, + {file = "ruff-0.3.4-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:60c870a7d46efcbc8385d27ec07fe534ac32f3b251e4fc44b3cbfd9e09609ef4"}, + {file = "ruff-0.3.4-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:6fc14fa742e1d8f24910e1fff0bd5e26d395b0e0e04cc1b15c7c5e5fe5b4af91"}, + {file = "ruff-0.3.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3ee7880f653cc03749a3bfea720cf2a192e4f884925b0cf7eecce82f0ce5854"}, + {file = "ruff-0.3.4-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cf133dd744f2470b347f602452a88e70dadfbe0fcfb5fd46e093d55da65f82f7"}, + {file = "ruff-0.3.4-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3f3860057590e810c7ffea75669bdc6927bfd91e29b4baa9258fd48b540a4365"}, + {file = "ruff-0.3.4-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:986f2377f7cf12efac1f515fc1a5b753c000ed1e0a6de96747cdf2da20a1b369"}, + {file = "ruff-0.3.4-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4fd98e85869603e65f554fdc5cddf0712e352fe6e61d29d5a6fe087ec82b76c"}, + {file = "ruff-0.3.4-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:64abeed785dad51801b423fa51840b1764b35d6c461ea8caef9cf9e5e5ab34d9"}, + {file = "ruff-0.3.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:df52972138318bc7546d92348a1ee58449bc3f9eaf0db278906eb511889c4b50"}, + {file = "ruff-0.3.4-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:98e98300056445ba2cc27d0b325fd044dc17fcc38e4e4d2c7711585bd0a958ed"}, + {file = "ruff-0.3.4-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:519cf6a0ebed244dce1dc8aecd3dc99add7a2ee15bb68cf19588bb5bf58e0488"}, + {file = "ruff-0.3.4-py3-none-musllinux_1_2_i686.whl", hash = "sha256:bb0acfb921030d00070539c038cd24bb1df73a2981e9f55942514af8b17be94e"}, + {file = "ruff-0.3.4-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:cf187a7e7098233d0d0c71175375c5162f880126c4c716fa28a8ac418dcf3378"}, + {file = "ruff-0.3.4-py3-none-win32.whl", hash = "sha256:af27ac187c0a331e8ef91d84bf1c3c6a5dea97e912a7560ac0cef25c526a4102"}, + {file = "ruff-0.3.4-py3-none-win_amd64.whl", hash = "sha256:de0d5069b165e5a32b3c6ffbb81c350b1e3d3483347196ffdf86dc0ef9e37dd6"}, + {file = "ruff-0.3.4-py3-none-win_arm64.whl", hash = "sha256:6810563cc08ad0096b57c717bd78aeac888a1bfd38654d9113cb3dc4d3f74232"}, + {file = "ruff-0.3.4.tar.gz", hash = "sha256:f0f4484c6541a99862b693e13a151435a279b271cff20e37101116a21e2a1ad1"}, ] [[package]] @@ -3950,18 +3904,18 @@ paramiko = "*" [[package]] name = "setuptools" -version = "69.1.1" +version = "69.2.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.1.1-py3-none-any.whl", hash = "sha256:02fa291a0471b3a18b2b2481ed902af520c69e8ae0919c13da936542754b4c56"}, - {file = "setuptools-69.1.1.tar.gz", hash = "sha256:5c0806c7d9af348e6dd3777b4f4dbb42c7ad85b190104837488eab9a7c945cf8"}, + {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, + {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", 
"pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] @@ -4180,13 +4134,13 @@ files = [ [[package]] name = "tomlkit" -version = "0.12.3" +version = "0.12.4" description = "Style preserving TOML library" optional = false python-versions = ">=3.7" files = [ - {file = "tomlkit-0.12.3-py3-none-any.whl", hash = "sha256:b0a645a9156dc7cb5d3a1f0d4bab66db287fcb8e0430bdd4664a095ea16414ba"}, - {file = "tomlkit-0.12.3.tar.gz", hash = "sha256:75baf5012d06501f07bee5bf8e801b9f343e7aac5a92581f20f80ce632e6b5a4"}, + {file = "tomlkit-0.12.4-py3-none-any.whl", hash = 
"sha256:5cd82d48a3dd89dee1f9d64420aa20ae65cfbd00668d6f094d7578a78efbb77b"}, + {file = "tomlkit-0.12.4.tar.gz", hash = "sha256:7ca1cfc12232806517a8515047ba66a19369e71edf2439d0f5824f91032b6cc3"}, ] [[package]] @@ -4213,18 +4167,18 @@ dev = ["furo", "packaging", "sphinx (>=5)", "twisted"] [[package]] name = "traitlets" -version = "5.14.1" +version = "5.14.2" description = "Traitlets Python configuration system" optional = false python-versions = ">=3.8" files = [ - {file = "traitlets-5.14.1-py3-none-any.whl", hash = "sha256:2e5a030e6eff91737c643231bfcf04a65b0132078dad75e4936700b213652e74"}, - {file = "traitlets-5.14.1.tar.gz", hash = "sha256:8585105b371a04b8316a43d5ce29c098575c2e477850b62b848b964f1444527e"}, + {file = "traitlets-5.14.2-py3-none-any.whl", hash = "sha256:fcdf85684a772ddeba87db2f398ce00b40ff550d1528c03c14dbf6a02003cd80"}, + {file = "traitlets-5.14.2.tar.gz", hash = "sha256:8cdd83c040dab7d1dee822678e5f5d100b514f7b72b01615b26fc5718916fdf9"}, ] [package.extras] docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] -test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0,<7.5)", "pytest-mock", "pytest-mypy-testing"] +test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0,<8.1)", "pytest-mock", "pytest-mypy-testing"] [[package]] name = "transitions" @@ -4390,97 +4344,18 @@ files = [ [[package]] name = "wheel" -version = "0.42.0" +version = "0.43.0" description = "A built-package format for Python" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "wheel-0.42.0-py3-none-any.whl", hash = "sha256:177f9c9b0d45c47873b619f5b650346d632cdc35fb5e4d25058e09c9e581433d"}, - {file = "wheel-0.42.0.tar.gz", hash = "sha256:c45be39f7882c9d34243236f2d63cbd58039e360f85d0913425fbd7ceea617a8"}, + {file = "wheel-0.43.0-py3-none-any.whl", hash = "sha256:55c570405f142630c6b9f72fe09d9b67cf1477fcf543ae5b8dcb1f5b7377da81"}, + {file = "wheel-0.43.0.tar.gz", hash = 
"sha256:465ef92c69fa5c5da2d1cf8ac40559a8c940886afcef87dcf14b9470862f1d85"}, ] [package.extras] test = ["pytest (>=6.0.0)", "setuptools (>=65)"] -[[package]] -name = "wrapt" -version = "1.16.0" -description = "Module for decorators, wrappers and monkey patching." -optional = false -python-versions = ">=3.6" -files = [ - {file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"}, - {file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"}, - {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440"}, - {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487"}, - {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf"}, - {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72"}, - {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0"}, - {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136"}, - {file = "wrapt-1.16.0-cp310-cp310-win32.whl", hash = "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d"}, - {file = "wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2"}, - {file = "wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09"}, - {file = "wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d"}, - {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389"}, - {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060"}, - {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1"}, - {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3"}, - {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956"}, - {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d"}, - {file = "wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362"}, - {file = "wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89"}, - {file = "wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b"}, - {file = "wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36"}, - {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73"}, - {file = 
"wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809"}, - {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b"}, - {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81"}, - {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9"}, - {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c"}, - {file = "wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc"}, - {file = "wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8"}, - {file = "wrapt-1.16.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8"}, - {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39"}, - {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c"}, - {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40"}, - {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc"}, - {file = 
"wrapt-1.16.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e"}, - {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465"}, - {file = "wrapt-1.16.0-cp36-cp36m-win32.whl", hash = "sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e"}, - {file = "wrapt-1.16.0-cp36-cp36m-win_amd64.whl", hash = "sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966"}, - {file = "wrapt-1.16.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593"}, - {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292"}, - {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5"}, - {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf"}, - {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228"}, - {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f"}, - {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c"}, - {file = "wrapt-1.16.0-cp37-cp37m-win32.whl", hash = "sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c"}, - {file = "wrapt-1.16.0-cp37-cp37m-win_amd64.whl", hash = "sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00"}, - {file = 
"wrapt-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0"}, - {file = "wrapt-1.16.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202"}, - {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0"}, - {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e"}, - {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f"}, - {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267"}, - {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca"}, - {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6"}, - {file = "wrapt-1.16.0-cp38-cp38-win32.whl", hash = "sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b"}, - {file = "wrapt-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41"}, - {file = "wrapt-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2"}, - {file = "wrapt-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb"}, - {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8"}, - {file = 
"wrapt-1.16.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c"}, - {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a"}, - {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664"}, - {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f"}, - {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537"}, - {file = "wrapt-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3"}, - {file = "wrapt-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35"}, - {file = "wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1"}, - {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"}, -] - [[package]] name = "yamllint" version = "1.35.1" @@ -4515,18 +4390,18 @@ pyyaml = "*" [[package]] name = "zipp" -version = "3.17.0" +version = "3.18.1" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false python-versions = ">=3.8" files = [ - {file = "zipp-3.17.0-py3-none-any.whl", hash = "sha256:0e923e726174922dce09c53c59ad483ff7bbb8e572e00c7f7c46b88556409f31"}, - {file = "zipp-3.17.0.tar.gz", hash = "sha256:84e64a1c28cf7e91ed2078bb8cc8c259cb19b76942096c8d7b84947690cabaf0"}, + {file = "zipp-3.18.1-py3-none-any.whl", hash = "sha256:206f5a15f2af3dbaee80769fb7dc6f249695e940acca08dfb2a4769fe61e538b"}, + {file = 
"zipp-3.18.1.tar.gz", hash = "sha256:2884ed22e7d8961de1c9a05142eb69a247f120291bc0206a00a7642f09b5b715"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] [extras] all = [] diff --git a/pyproject.toml b/pyproject.toml index 91f0b94a..ec19a8a1 100755 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "nautobot-device-onboarding" -version = "3.0.2a9" +version = "3.0.2a10" description = "A app for Nautobot to easily onboard new devices." 
authors = ["Network to Code, LLC "] license = "Apache-2.0" From 9edc3f153d62c113f56d9a0578d2331e27d17d8c Mon Sep 17 00:00:00 2001 From: David Cates Date: Thu, 28 Mar 2024 10:21:27 -0700 Subject: [PATCH 177/225] black --- .../adapters/network_importer_adapters.py | 34 +++++++++++-------- 1 file changed, 20 insertions(+), 14 deletions(-) diff --git a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py index 57d82fd2..23bc5212 100644 --- a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py @@ -54,7 +54,7 @@ class NetworkImporterNautobotAdapter(FilteredNautobotAdapter): def _cache_primary_ips(self, device_queryset): """ Create a cache of primary ip address for devices. - + If the primary ip address of a device is unset due to the deletion of an interface, this cache is used to reset it. """ @@ -201,7 +201,7 @@ def load(self): """Generic implementation of the load function.""" if not hasattr(self, "top_level") or not self.top_level: raise ValueError("'top_level' needs to be set on the class.") - + self._cache_primary_ips(device_queryset=self.job.devices_to_load) for model_name in self.top_level: @@ -227,12 +227,12 @@ def sync_complete(self, source, diff, *args, **kwargs): Assign the primary ip address to a device and update the management interface setting. Syncing interfaces may result in the deletion of the original management interface. If - this happens, the primary IP Address for the device should be set and the management only + this happens, the primary IP Address for the device should be set and the management only option on the appropriate interface should be set to True. This method only runs if data was changed. 
""" - for device in self.job.devices_to_load.all(): # refresh queryset after sync is complete + for device in self.job.devices_to_load.all(): # refresh queryset after sync is complete if self.job.debug: self.job.logger.debug("Sync Complete method called, checking for missing primary ip addresses...") if not device.primary_ip: @@ -243,24 +243,30 @@ def sync_complete(self, source, diff, *args, **kwargs): device.validated_save() self.job.logger.info(f"Assigning {ip_address} as primary IP Address for Device: {device.name}") except Exception as err: - self.job.logger.error(f"Unable to set Primary IP for {device.name}, {err.args}. " - "Please check the primary IP Address assignment for this device.") + self.job.logger.error( + f"Unable to set Primary IP for {device.name}, {err.args}. " + "Please check the primary IP Address assignment for this device." + ) if ip_address: try: - interface = Interface.objects.get( - device=device, - ip_addresses__in=[ip_address] - ) + interface = Interface.objects.get(device=device, ip_addresses__in=[ip_address]) interface.mgmt_only = True interface.validated_save() - self.job.logger.info(f"Management only set for interface: {interface.name} on device: {device.name}") + self.job.logger.info( + f"Management only set for interface: {interface.name} on device: {device.name}" + ) except Exception as err: - self.job.logger.error("Failed to set management only on the " - f"management interface for {device.name}, {err}, {err.args}") + self.job.logger.error( + "Failed to set management only on the " + f"management interface for {device.name}, {err}, {err.args}" + ) else: - self.job.logger.error(f"Failed to set management only on the managmeent interface for {device.name}") + self.job.logger.error( + f"Failed to set management only on the managmeent interface for {device.name}" + ) return super().sync_complete(source, diff, *args, **kwargs) + class MacUnixExpandedUppercase(mac_unix_expanded): """Mac Unix Expanded Uppercase.""" From 
3d4018c140e2cc84b4c77a44c334f3fb9c8c1c3d Mon Sep 17 00:00:00 2001 From: David Cates Date: Thu, 28 Mar 2024 10:35:39 -0700 Subject: [PATCH 178/225] pylint --- .../diffsync/adapters/network_importer_adapters.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py index 23bc5212..4c71cd61 100644 --- a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py @@ -242,7 +242,7 @@ def sync_complete(self, source, diff, *args, **kwargs): device.primary_ip4 = ip_address device.validated_save() self.job.logger.info(f"Assigning {ip_address} as primary IP Address for Device: {device.name}") - except Exception as err: + except Exception as err: # pylint: disable=broad-exception-caught self.job.logger.error( f"Unable to set Primary IP for {device.name}, {err.args}. " "Please check the primary IP Address assignment for this device." 
@@ -255,7 +255,7 @@ def sync_complete(self, source, diff, *args, **kwargs): self.job.logger.info( f"Management only set for interface: {interface.name} on device: {device.name}" ) - except Exception as err: + except Exception as err: # pylint: disable=broad-exception-caught self.job.logger.error( "Failed to set management only on the " f"management interface for {device.name}, {err}, {err.args}" From cf646aecc02240f237bd6a805a89173a4b82c4d3 Mon Sep 17 00:00:00 2001 From: Susan Hooks Date: Fri, 29 Mar 2024 20:19:15 +0000 Subject: [PATCH 179/225] add error handling --- .../nornir_plays/processor.py | 2 +- nautobot_device_onboarding/utils/formatter.py | 267 +++++++++--------- 2 files changed, 140 insertions(+), 129 deletions(-) diff --git a/nautobot_device_onboarding/nornir_plays/processor.py b/nautobot_device_onboarding/nornir_plays/processor.py index 8392b79b..03617763 100755 --- a/nautobot_device_onboarding/nornir_plays/processor.py +++ b/nautobot_device_onboarding/nornir_plays/processor.py @@ -82,7 +82,7 @@ def subtask_instance_completed(self, task: Task, host: Host, result: MultiResult def subtask_instance_started(self, task: Task, host: Host) -> None: # show command start """Processor for logging and data processing on subtask start.""" - self.logger.info(f"subtask_instance_started Subtask starting {task.name}.", extra={"object": task.host}) + self.logger.info(f"subtask_instance_started Subtask starting {task.name}, {task.host}.", extra={"object": task.host}) if not self.data.get(host.name): self.data[host.name] = { "platform": host.platform, diff --git a/nautobot_device_onboarding/utils/formatter.py b/nautobot_device_onboarding/utils/formatter.py index ec43b3c4..604df0dd 100755 --- a/nautobot_device_onboarding/utils/formatter.py +++ b/nautobot_device_onboarding/utils/formatter.py @@ -127,131 +127,139 @@ def map_interface_type(interface_type): def format_ios_results(device): """Format the results of the show commands for IOS devices.""" - serial = 
device.get("serial") - mtu_list = device.get("mtu", []) - type_list = device.get("type", []) - ip_list = device.get("ip_addresses", []) - prefix_list = device.get("prefix_length", []) - mac_list = device.get("mac_address", []) - description_list = device.get("description", []) - link_status_list = device.get("link_status", []) - - interface_dict = {} - for item in mtu_list: - interface_dict.setdefault(item["interface"], {})["mtu"] = item["mtu"] - for item in type_list: - interface_type = map_interface_type(item["type"]) - interface_dict.setdefault(item["interface"], {})["type"] = interface_type - for item in ip_list: - interface_dict.setdefault(item["interface"], {})["ip_addresses"] = {"ip_address": item["ip_address"]} - for item in prefix_list: - interface_dict.setdefault(item["interface"], {}).setdefault("ip_addresses", {})["prefix_length"] = item[ - "prefix_length" - ] - for item in mac_list: - interface_dict.setdefault(item["interface"], {})["mac_address"] = item["mac_address"] - for item in description_list: - interface_dict.setdefault(item["interface"], {})["description"] = item["description"] - for item in link_status_list: - interface_dict.setdefault(item["interface"], {})["link_status"] = True if item["link_status"] == "up" else False - for interface in interface_dict.values(): - interface.setdefault("802.1Q_mode", "") - interface.setdefault("lag", "") - interface.setdefault("untagged_vlan", {}) - interface.setdefault("tagged_vlans", []) - - for interface, data in interface_dict.items(): - ip_addresses = data.get("ip_addresses", {}) - if ip_addresses: - data["ip_addresses"] = [ip_addresses] - - # Convert interface names to canonical form - interface_list = [] - for interface_name, interface_info in interface_dict.items(): - interface_list.append({canonical_interface_name(interface_name): interface_info}) - - device["interfaces"] = interface_list - device["serial"] = serial try: - del device["mtu"] - del device["type"] - del device["ip_addresses"] - del 
device["prefix_length"] - del device["mac_address"] - del device["description"] - del device["link_status"] - - except KeyError: - pass - + serial = device.get("serial") + mtu_list = device.get("mtu", []) + type_list = device.get("type", []) + ip_list = device.get("ip_addresses", []) + prefix_list = device.get("prefix_length", []) + mac_list = device.get("mac_address", []) + description_list = device.get("description", []) + link_status_list = device.get("link_status", []) + #vrf_list = device.get("vrf", []) + #print(f"vrf_list: {vrf_list}") + + interface_dict = {} + for item in mtu_list: + interface_dict.setdefault(item["interface"], {})["mtu"] = item["mtu"] + for item in type_list: + interface_type = map_interface_type(item["type"]) + interface_dict.setdefault(item["interface"], {})["type"] = interface_type + for item in ip_list: + interface_dict.setdefault(item["interface"], {})["ip_addresses"] = {"ip_address": item["ip_address"]} + for item in prefix_list: + interface_dict.setdefault(item["interface"], {}).setdefault("ip_addresses", {})["prefix_length"] = item[ + "prefix_length" + ] + for item in mac_list: + interface_dict.setdefault(item["interface"], {})["mac_address"] = item["mac_address"] + for item in description_list: + interface_dict.setdefault(item["interface"], {})["description"] = item["description"] + for item in link_status_list: + interface_dict.setdefault(item["interface"], {})["link_status"] = True if item["link_status"] == "up" else False + for interface in interface_dict.values(): + interface.setdefault("802.1Q_mode", "") + interface.setdefault("lag", "") + interface.setdefault("untagged_vlan", {}) + interface.setdefault("tagged_vlans", []) + + for interface, data in interface_dict.items(): + ip_addresses = data.get("ip_addresses", {}) + if ip_addresses: + data["ip_addresses"] = [ip_addresses] + + # Convert interface names to canonical form + interface_list = [] + for interface_name, interface_info in interface_dict.items(): + 
interface_list.append({canonical_interface_name(interface_name): interface_info}) + + device["interfaces"] = interface_list + device["serial"] = serial + try: + del device["mtu"] + del device["type"] + del device["ip_addresses"] + del device["prefix_length"] + del device["mac_address"] + del device["description"] + del device["link_status"] + + except KeyError: + pass + except Error as e: + device = { "failed": True, "failed_reason": f"Formatting error for device {device}"} return device def format_nxos_results(device): """Format the results of the show commands for NX-OS devices.""" - serial = device.get("serial") - mtu_list = device.get("mtu", []) - type_list = device.get("type", []) - ip_list = device.get("ip_addresses", []) - prefix_list = device.get("prefix_length", []) - mac_list = device.get("mac_address", []) - description_list = device.get("description", []) - link_status_list = device.get("link_status", []) - mode_list = device.get("mode", []) - - interface_dict = {} - for item in mtu_list: - interface_dict.setdefault(item["interface"], {})["mtu"] = item["mtu"] - for item in type_list: - interface_type = map_interface_type(item["type"]) - interface_dict.setdefault(item["interface"], {})["type"] = interface_type - for item in ip_list: - interface_dict.setdefault(item["interface"], {})["ip_addresses"] = {"ip_address": item["ip_address"]} - for item in prefix_list: - interface_dict.setdefault(item["interface"], {}).setdefault("ip_addresses", {})["prefix_length"] = item[ - "prefix_length" - ] - for item in mac_list: - interface_dict.setdefault(item["interface"], {})["mac_address"] = item["mac_address"] - for item in description_list: - interface_dict.setdefault(item["interface"], {})["description"] = item["description"] - for item in link_status_list: - interface_dict.setdefault(item["interface"], {})["link_status"] = True if item["link_status"] == "up" else False - for item in mode_list: - interface_dict.setdefault(item["interface"], {})["802.1Q_mode"] = ( - 
"access" if item["mode"] == "access" else "tagged" if item["mode"] == "trunk" else "" - ) - - for interface in interface_dict.values(): - # interface.setdefault("802.1Q_mode", "") - interface.setdefault("lag", "") - interface.setdefault("untagged_vlan", {}) - interface.setdefault("tagged_vlans", []) - - for interface, data in interface_dict.items(): - ip_addresses = data.get("ip_addresses", {}) - if ip_addresses: - data["ip_addresses"] = [ip_addresses] - - # Convert interface names to canonical form - interface_list = [] - for interface_name, interface_info in interface_dict.items(): - interface_list.append({canonical_interface_name(interface_name): interface_info}) - - device["interfaces"] = interface_list - device["serial"] = serial try: - del device["mtu"] - del device["type"] - del device["ip_addresses"] - del device["prefix_length"] - del device["mac_address"] - del device["description"] - del device["link_status"] - del device["mode"] - except KeyError: - pass - + serial = device.get("serial") + mtu_list = device.get("mtu", []) + type_list = device.get("type", []) + ip_list = device.get("ip_addresses", []) + prefix_list = device.get("prefix_length", []) + mac_list = device.get("mac_address", []) + description_list = device.get("description", []) + link_status_list = device.get("link_status", []) + mode_list = device.get("mode", []) + + interface_dict = {} + for item in mtu_list: + interface_dict.setdefault(item["interface"], {})["mtu"] = item["mtu"] + for item in type_list: + interface_type = map_interface_type(item["type"]) + interface_dict.setdefault(item["interface"], {})["type"] = interface_type + for item in ip_list: + interface_dict.setdefault(item["interface"], {})["ip_addresses"] = {"ip_address": item["ip_address"]} + for item in prefix_list: + interface_dict.setdefault(item["interface"], {}).setdefault("ip_addresses", {})["prefix_length"] = item[ + "prefix_length" + ] + for item in mac_list: + interface_dict.setdefault(item["interface"], 
{})["mac_address"] = item["mac_address"] + for item in description_list: + interface_dict.setdefault(item["interface"], {})["description"] = item["description"] + for item in link_status_list: + interface_dict.setdefault(item["interface"], {})["link_status"] = True if item["link_status"] == "up" else False + for item in mode_list: + interface_dict.setdefault(item["interface"], {})["802.1Q_mode"] = ( + "access" if item["mode"] == "access" else "tagged" if item["mode"] == "trunk" else "" + ) + + for interface in interface_dict.values(): + # interface.setdefault("802.1Q_mode", "") + interface.setdefault("lag", "") + interface.setdefault("untagged_vlan", {}) + interface.setdefault("tagged_vlans", []) + + for interface, data in interface_dict.items(): + ip_addresses = data.get("ip_addresses", {}) + if ip_addresses: + data["ip_addresses"] = [ip_addresses] + + # Convert interface names to canonical form + interface_list = [] + for interface_name, interface_info in interface_dict.items(): + interface_list.append({canonical_interface_name(interface_name): interface_info}) + + device["interfaces"] = interface_list + device["serial"] = serial + try: + del device["mtu"] + del device["type"] + del device["ip_addresses"] + del device["prefix_length"] + del device["mac_address"] + del device["description"] + del device["link_status"] + del device["mode"] + except KeyError: + pass + except Error as e: + device = { "failed": True, "failed_reason": f"Formatting error for device {device}"} + return device + return device @@ -269,15 +277,18 @@ def format_results(compiled_results): Returns: compiled_results (dict): The formatted results. 
""" - for device in compiled_results: - platform = compiled_results[device]["platform"] - if platform in ["cisco_ios", "cisco_xe"]: - format_ios_results(compiled_results[device]) - elif platform == "cisco_nxos": - format_nxos_results(compiled_results[device]) - elif platform == "juniper_junos": - format_junos_results(compiled_results[device]) + for device, data in compiled_results.items(): + print(f"data: {data}") + if "platform" in data: + platform = data.get("platform") + if platform not in ["cisco_ios", "cisco_xe", "cisco_nxos"]: + data.update({"failed": True, "failed_reason": f"Unsupported platform {platform}"}) + if "type" in data: + if platform in ["cisco_ios", "cisco_xe"]: + format_ios_results(data) + elif platform == "cisco_nxos": + format_nxos_results(data) else: - raise ValueError(f"Unsupported platform {platform}") - + data.update({"failed": True, "failed_reason": "Cannot connect to device."}) + return compiled_results From 71ae5321e94a98696d481789e741ecdaa1dc6bdd Mon Sep 17 00:00:00 2001 From: Susan Hooks Date: Mon, 1 Apr 2024 16:44:09 +0000 Subject: [PATCH 180/225] bump version --- .../command_mappers/juniper_junos.yml | 6 + nautobot_device_onboarding/utils/formatter.py | 2 +- poetry.lock | 231 ++++++++++-------- pyproject.toml | 2 +- 4 files changed, 142 insertions(+), 99 deletions(-) diff --git a/nautobot_device_onboarding/command_mappers/juniper_junos.yml b/nautobot_device_onboarding/command_mappers/juniper_junos.yml index bfdec521..8543321a 100755 --- a/nautobot_device_onboarding/command_mappers/juniper_junos.yml +++ b/nautobot_device_onboarding/command_mappers/juniper_junos.yml @@ -31,3 +31,9 @@ device_onboarding: use_textfsm: false jpath: '"route-information"[]."route-table"[]."rt"[]."rt-destination"[].data' # yamllint disable-line rule:quoted-strings post_processor: "{% set mask = [] %}{% for ip_route in obj %}{% if ip_route | is_network %}{% if ip_route | ipaddress_network('version') == 4 %}{% if original_host | is_ip_within(ip_route) %}{% set 
_=mask.append(ip_route.split('/')[1]) %}{% endif %}{% endif %}{% endif %}{% endfor %}{{ mask | unique | first}}" +network_importer: + hostname: + commands: + - command: "show version | display json" + use_textfsm: false + jpath: '"software-information"[]."host-name"[].data' # yamllint disable-line rule:quoted-strings \ No newline at end of file diff --git a/nautobot_device_onboarding/utils/formatter.py b/nautobot_device_onboarding/utils/formatter.py index 604df0dd..b37d53fb 100755 --- a/nautobot_device_onboarding/utils/formatter.py +++ b/nautobot_device_onboarding/utils/formatter.py @@ -278,10 +278,10 @@ def format_results(compiled_results): compiled_results (dict): The formatted results. """ for device, data in compiled_results.items(): - print(f"data: {data}") if "platform" in data: platform = data.get("platform") if platform not in ["cisco_ios", "cisco_xe", "cisco_nxos"]: + print(f"Unsupported platform {platform}") data.update({"failed": True, "failed_reason": f"Unsupported platform {platform}"}) if "type" in data: if platform in ["cisco_ios", "cisco_xe"]: diff --git a/poetry.lock b/poetry.lock index dcbb8e9a..069befbf 100755 --- a/poetry.lock +++ b/poetry.lock @@ -1249,13 +1249,13 @@ sidecar = ["drf-spectacular-sidecar"] [[package]] name = "drf-spectacular-sidecar" -version = "2024.3.4" +version = "2024.4.1" description = "Serve self-contained distribution builds of Swagger UI and Redoc with Django" optional = false python-versions = ">=3.6" files = [ - {file = "drf-spectacular-sidecar-2024.3.4.tar.gz", hash = "sha256:101449802421606a2de8be0e27c52e5e7bae14a6d99e1a7ab27eddb659fb8676"}, - {file = "drf_spectacular_sidecar-2024.3.4-py3-none-any.whl", hash = "sha256:71db685ab4fae50f33261c86f5cfd1ae9b3cca72bc0426ed91868121d041be24"}, + {file = "drf-spectacular-sidecar-2024.4.1.tar.gz", hash = "sha256:68532dd094714f79c1775c00848f22c10f004826abc856442ff30c3bc9c40bb4"}, + {file = "drf_spectacular_sidecar-2024.4.1-py3-none-any.whl", hash = 
"sha256:8359befe69a8953fea86be01c1ff37038854a62546225551de16c47c07dccd4e"}, ] [package.dependencies] @@ -1363,20 +1363,21 @@ smmap = ">=3.0.1,<6" [[package]] name = "gitpython" -version = "3.1.42" +version = "3.1.43" description = "GitPython is a Python library used to interact with Git repositories" optional = false python-versions = ">=3.7" files = [ - {file = "GitPython-3.1.42-py3-none-any.whl", hash = "sha256:1bf9cd7c9e7255f77778ea54359e54ac22a72a5b51288c457c881057b7bb9ecd"}, - {file = "GitPython-3.1.42.tar.gz", hash = "sha256:2d99869e0fef71a73cbd242528105af1d6c1b108c60dfabd994bf292f76c3ceb"}, + {file = "GitPython-3.1.43-py3-none-any.whl", hash = "sha256:eec7ec56b92aad751f9912a73404bc02ba212a23adb2c7098ee668417051a1ff"}, + {file = "GitPython-3.1.43.tar.gz", hash = "sha256:35f314a9f878467f5453cc1fee295c3e18e52f1b99f10f6cf5b1682e968a9e7c"}, ] [package.dependencies] gitdb = ">=4.0.1,<5" [package.extras] -test = ["black", "coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mock", "mypy", "pre-commit", "pytest (>=7.3.1)", "pytest-cov", "pytest-instafail", "pytest-mock", "pytest-sugar"] +doc = ["sphinx (==4.3.2)", "sphinx-autodoc-typehints", "sphinx-rtd-theme", "sphinxcontrib-applehelp (>=1.0.2,<=1.0.4)", "sphinxcontrib-devhelp (==1.0.2)", "sphinxcontrib-htmlhelp (>=2.0.0,<=2.0.1)", "sphinxcontrib-qthelp (==1.0.3)", "sphinxcontrib-serializinghtml (==1.1.5)"] +test = ["coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mock", "mypy", "pre-commit", "pytest (>=7.3.1)", "pytest-cov", "pytest-instafail", "pytest-mock", "pytest-sugar", "typing-extensions"] [[package]] name = "gprof2dot" @@ -1850,96 +1851,132 @@ zookeeper = ["kazoo (>=2.8.0)"] [[package]] name = "lxml" -version = "5.1.0" +version = "5.2.0" description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." 
optional = false python-versions = ">=3.6" files = [ - {file = "lxml-5.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:704f5572ff473a5f897745abebc6df40f22d4133c1e0a1f124e4f2bd3330ff7e"}, - {file = "lxml-5.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9d3c0f8567ffe7502d969c2c1b809892dc793b5d0665f602aad19895f8d508da"}, - {file = "lxml-5.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5fcfbebdb0c5d8d18b84118842f31965d59ee3e66996ac842e21f957eb76138c"}, - {file = "lxml-5.1.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2f37c6d7106a9d6f0708d4e164b707037b7380fcd0b04c5bd9cae1fb46a856fb"}, - {file = "lxml-5.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2befa20a13f1a75c751f47e00929fb3433d67eb9923c2c0b364de449121f447c"}, - {file = "lxml-5.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22b7ee4c35f374e2c20337a95502057964d7e35b996b1c667b5c65c567d2252a"}, - {file = "lxml-5.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:bf8443781533b8d37b295016a4b53c1494fa9a03573c09ca5104550c138d5c05"}, - {file = "lxml-5.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:82bddf0e72cb2af3cbba7cec1d2fd11fda0de6be8f4492223d4a268713ef2147"}, - {file = "lxml-5.1.0-cp310-cp310-win32.whl", hash = "sha256:b66aa6357b265670bb574f050ffceefb98549c721cf28351b748be1ef9577d93"}, - {file = "lxml-5.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:4946e7f59b7b6a9e27bef34422f645e9a368cb2be11bf1ef3cafc39a1f6ba68d"}, - {file = "lxml-5.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:14deca1460b4b0f6b01f1ddc9557704e8b365f55c63070463f6c18619ebf964f"}, - {file = "lxml-5.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ed8c3d2cd329bf779b7ed38db176738f3f8be637bb395ce9629fc76f78afe3d4"}, - {file = "lxml-5.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:436a943c2900bb98123b06437cdd30580a61340fbdb7b28aaf345a459c19046a"}, - 
{file = "lxml-5.1.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:acb6b2f96f60f70e7f34efe0c3ea34ca63f19ca63ce90019c6cbca6b676e81fa"}, - {file = "lxml-5.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:af8920ce4a55ff41167ddbc20077f5698c2e710ad3353d32a07d3264f3a2021e"}, - {file = "lxml-5.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7cfced4a069003d8913408e10ca8ed092c49a7f6cefee9bb74b6b3e860683b45"}, - {file = "lxml-5.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9e5ac3437746189a9b4121db2a7b86056ac8786b12e88838696899328fc44bb2"}, - {file = "lxml-5.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f4c9bda132ad108b387c33fabfea47866af87f4ea6ffb79418004f0521e63204"}, - {file = "lxml-5.1.0-cp311-cp311-win32.whl", hash = "sha256:bc64d1b1dab08f679fb89c368f4c05693f58a9faf744c4d390d7ed1d8223869b"}, - {file = "lxml-5.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:a5ab722ae5a873d8dcee1f5f45ddd93c34210aed44ff2dc643b5025981908cda"}, - {file = "lxml-5.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:9aa543980ab1fbf1720969af1d99095a548ea42e00361e727c58a40832439114"}, - {file = "lxml-5.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6f11b77ec0979f7e4dc5ae081325a2946f1fe424148d3945f943ceaede98adb8"}, - {file = "lxml-5.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a36c506e5f8aeb40680491d39ed94670487ce6614b9d27cabe45d94cd5d63e1e"}, - {file = "lxml-5.1.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f643ffd2669ffd4b5a3e9b41c909b72b2a1d5e4915da90a77e119b8d48ce867a"}, - {file = "lxml-5.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:16dd953fb719f0ffc5bc067428fc9e88f599e15723a85618c45847c96f11f431"}, - {file = "lxml-5.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:16018f7099245157564d7148165132c70adb272fb5a17c048ba70d9cc542a1a1"}, - {file = "lxml-5.1.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:82cd34f1081ae4ea2ede3d52f71b7be313756e99b4b5f829f89b12da552d3aa3"}, - {file = "lxml-5.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:19a1bc898ae9f06bccb7c3e1dfd73897ecbbd2c96afe9095a6026016e5ca97b8"}, - {file = "lxml-5.1.0-cp312-cp312-win32.whl", hash = "sha256:13521a321a25c641b9ea127ef478b580b5ec82aa2e9fc076c86169d161798b01"}, - {file = "lxml-5.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:1ad17c20e3666c035db502c78b86e58ff6b5991906e55bdbef94977700c72623"}, - {file = "lxml-5.1.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:24ef5a4631c0b6cceaf2dbca21687e29725b7c4e171f33a8f8ce23c12558ded1"}, - {file = "lxml-5.1.0-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8d2900b7f5318bc7ad8631d3d40190b95ef2aa8cc59473b73b294e4a55e9f30f"}, - {file = "lxml-5.1.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:601f4a75797d7a770daed8b42b97cd1bb1ba18bd51a9382077a6a247a12aa38d"}, - {file = "lxml-5.1.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4b68c961b5cc402cbd99cca5eb2547e46ce77260eb705f4d117fd9c3f932b95"}, - {file = "lxml-5.1.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:afd825e30f8d1f521713a5669b63657bcfe5980a916c95855060048b88e1adb7"}, - {file = "lxml-5.1.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:262bc5f512a66b527d026518507e78c2f9c2bd9eb5c8aeeb9f0eb43fcb69dc67"}, - {file = "lxml-5.1.0-cp36-cp36m-win32.whl", hash = "sha256:e856c1c7255c739434489ec9c8aa9cdf5179785d10ff20add308b5d673bed5cd"}, - {file = "lxml-5.1.0-cp36-cp36m-win_amd64.whl", hash = "sha256:c7257171bb8d4432fe9d6fdde4d55fdbe663a63636a17f7f9aaba9bcb3153ad7"}, - {file = "lxml-5.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b9e240ae0ba96477682aa87899d94ddec1cc7926f9df29b1dd57b39e797d5ab5"}, - {file = 
"lxml-5.1.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a96f02ba1bcd330807fc060ed91d1f7a20853da6dd449e5da4b09bfcc08fdcf5"}, - {file = "lxml-5.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e3898ae2b58eeafedfe99e542a17859017d72d7f6a63de0f04f99c2cb125936"}, - {file = "lxml-5.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61c5a7edbd7c695e54fca029ceb351fc45cd8860119a0f83e48be44e1c464862"}, - {file = "lxml-5.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:3aeca824b38ca78d9ee2ab82bd9883083d0492d9d17df065ba3b94e88e4d7ee6"}, - {file = "lxml-5.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8f52fe6859b9db71ee609b0c0a70fea5f1e71c3462ecf144ca800d3f434f0764"}, - {file = "lxml-5.1.0-cp37-cp37m-win32.whl", hash = "sha256:d42e3a3fc18acc88b838efded0e6ec3edf3e328a58c68fbd36a7263a874906c8"}, - {file = "lxml-5.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:eac68f96539b32fce2c9b47eb7c25bb2582bdaf1bbb360d25f564ee9e04c542b"}, - {file = "lxml-5.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ae15347a88cf8af0949a9872b57a320d2605ae069bcdf047677318bc0bba45b1"}, - {file = "lxml-5.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c26aab6ea9c54d3bed716b8851c8bfc40cb249b8e9880e250d1eddde9f709bf5"}, - {file = "lxml-5.1.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:342e95bddec3a698ac24378d61996b3ee5ba9acfeb253986002ac53c9a5f6f84"}, - {file = "lxml-5.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:725e171e0b99a66ec8605ac77fa12239dbe061482ac854d25720e2294652eeaa"}, - {file = "lxml-5.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d184e0d5c918cff04cdde9dbdf9600e960161d773666958c9d7b565ccc60c45"}, - {file = "lxml-5.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:98f3f020a2b736566c707c8e034945c02aa94e124c24f77ca097c446f81b01f1"}, - {file = "lxml-5.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6d48fc57e7c1e3df57be5ae8614bab6d4e7b60f65c5457915c26892c41afc59e"}, - {file = "lxml-5.1.0-cp38-cp38-win32.whl", hash = "sha256:7ec465e6549ed97e9f1e5ed51c657c9ede767bc1c11552f7f4d022c4df4a977a"}, - {file = "lxml-5.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:b21b4031b53d25b0858d4e124f2f9131ffc1530431c6d1321805c90da78388d1"}, - {file = "lxml-5.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:52427a7eadc98f9e62cb1368a5079ae826f94f05755d2d567d93ee1bc3ceb354"}, - {file = "lxml-5.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6a2a2c724d97c1eb8cf966b16ca2915566a4904b9aad2ed9a09c748ffe14f969"}, - {file = "lxml-5.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:843b9c835580d52828d8f69ea4302537337a21e6b4f1ec711a52241ba4a824f3"}, - {file = "lxml-5.1.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9b99f564659cfa704a2dd82d0684207b1aadf7d02d33e54845f9fc78e06b7581"}, - {file = "lxml-5.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f8b0c78e7aac24979ef09b7f50da871c2de2def043d468c4b41f512d831e912"}, - {file = "lxml-5.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9bcf86dfc8ff3e992fed847c077bd875d9e0ba2fa25d859c3a0f0f76f07f0c8d"}, - {file = "lxml-5.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:49a9b4af45e8b925e1cd6f3b15bbba2c81e7dba6dce170c677c9cda547411e14"}, - {file = "lxml-5.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:280f3edf15c2a967d923bcfb1f8f15337ad36f93525828b40a0f9d6c2ad24890"}, - {file = "lxml-5.1.0-cp39-cp39-win32.whl", hash = "sha256:ed7326563024b6e91fef6b6c7a1a2ff0a71b97793ac33dbbcf38f6005e51ff6e"}, - {file = "lxml-5.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:8d7b4beebb178e9183138f552238f7e6613162a42164233e2bda00cb3afac58f"}, - {file = 
"lxml-5.1.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9bd0ae7cc2b85320abd5e0abad5ccee5564ed5f0cc90245d2f9a8ef330a8deae"}, - {file = "lxml-5.1.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8c1d679df4361408b628f42b26a5d62bd3e9ba7f0c0e7969f925021554755aa"}, - {file = "lxml-5.1.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:2ad3a8ce9e8a767131061a22cd28fdffa3cd2dc193f399ff7b81777f3520e372"}, - {file = "lxml-5.1.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:304128394c9c22b6569eba2a6d98392b56fbdfbad58f83ea702530be80d0f9df"}, - {file = "lxml-5.1.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d74fcaf87132ffc0447b3c685a9f862ffb5b43e70ea6beec2fb8057d5d2a1fea"}, - {file = "lxml-5.1.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:8cf5877f7ed384dabfdcc37922c3191bf27e55b498fecece9fd5c2c7aaa34c33"}, - {file = "lxml-5.1.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:877efb968c3d7eb2dad540b6cabf2f1d3c0fbf4b2d309a3c141f79c7e0061324"}, - {file = "lxml-5.1.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f14a4fb1c1c402a22e6a341a24c1341b4a3def81b41cd354386dcb795f83897"}, - {file = "lxml-5.1.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:25663d6e99659544ee8fe1b89b1a8c0aaa5e34b103fab124b17fa958c4a324a6"}, - {file = "lxml-5.1.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:8b9f19df998761babaa7f09e6bc169294eefafd6149aaa272081cbddc7ba4ca3"}, - {file = "lxml-5.1.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e53d7e6a98b64fe54775d23a7c669763451340c3d44ad5e3a3b48a1efbdc96f"}, - {file = "lxml-5.1.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:c3cd1fc1dc7c376c54440aeaaa0dcc803d2126732ff5c6b68ccd619f2e64be4f"}, - {file = "lxml-5.1.0.tar.gz", hash = "sha256:3eea6ed6e6c918e468e693c41ef07f3c3acc310b70ddd9cc72d9ef84bc9564ca"}, + {file = "lxml-5.2.0-cp310-cp310-macosx_10_9_universal2.whl", 
hash = "sha256:c54f8d6160080831a76780d850302fdeb0e8d0806f661777b0714dfb55d9a08a"}, + {file = "lxml-5.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0e95ae029396382a0d2e8174e4077f96befcd4a2184678db363ddc074eb4d3b2"}, + {file = "lxml-5.2.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5810fa80e64a0c689262a71af999c5735f48c0da0affcbc9041d1ef5ef3920be"}, + {file = "lxml-5.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ae69524fd6a68b288574013f8fadac23cacf089c75cd3fc5b216277a445eb736"}, + {file = "lxml-5.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fadda215e32fe375d65e560b7f7e2a37c7f9c4ecee5315bb1225ca6ac9bf5838"}, + {file = "lxml-5.2.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:f1f164e4cc6bc646b1fc86664c3543bf4a941d45235797279b120dc740ee7af5"}, + {file = "lxml-5.2.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:3603a8a41097daf7672cae22cc4a860ab9ea5597f1c5371cb21beca3398b8d6a"}, + {file = "lxml-5.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b3b4bb89a785f4fd60e05f3c3a526c07d0d68e3536f17f169ca13bf5b5dd75a5"}, + {file = "lxml-5.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1effc10bf782f0696e76ecfeba0720ea02c0c31d5bffb7b29ba10debd57d1c3d"}, + {file = "lxml-5.2.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b03531f6cd6ce4b511dcece060ca20aa5412f8db449274b44f4003f282e6272f"}, + {file = "lxml-5.2.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7fac15090bb966719df06f0c4f8139783746d1e60e71016d8a65db2031ca41b8"}, + {file = "lxml-5.2.0-cp310-cp310-win32.whl", hash = "sha256:92bb37c96215c4b2eb26f3c791c0bf02c64dd251effa532b43ca5049000c4478"}, + {file = "lxml-5.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:b0181c22fdb89cc19e70240a850e5480817c3e815b1eceb171b3d7a3aa3e596a"}, + {file = "lxml-5.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = 
"sha256:ada8ce9e6e1d126ef60d215baaa0c81381ba5841c25f1d00a71cdafdc038bd27"}, + {file = "lxml-5.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3cefb133c859f06dab2ae63885d9f405000c4031ec516e0ed4f9d779f690d8e3"}, + {file = "lxml-5.2.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1ede2a7a86a977b0c741654efaeca0af7860a9b1ae39f9268f0936246a977ee0"}, + {file = "lxml-5.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d46df6f0b1a0cda39d12c5c4615a7d92f40342deb8001c7b434d7c8c78352e58"}, + {file = "lxml-5.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc2259243ee734cc736e237719037efb86603c891fd363cc7973a2d0ac8a0e3f"}, + {file = "lxml-5.2.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:c53164f29ed3c3868787144e8ea8a399ffd7d8215f59500a20173593c19e96eb"}, + {file = "lxml-5.2.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:371aab9a397dcc76625ad3b02fa9b21be63406d69237b773156e7d1fc2ce0cae"}, + {file = "lxml-5.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e08784288a179b59115b5e57abf6d387528b39abb61105fe17510a199a277a40"}, + {file = "lxml-5.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4c232726f7b6df5143415a06323faaa998ef8abbe1c0ed00d718755231d76f08"}, + {file = "lxml-5.2.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e4366e58c0508da4dee4c7c70cee657e38553d73abdffa53abbd7d743711ee11"}, + {file = "lxml-5.2.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c84dce8fb2e900d4fb094e76fdad34a5fd06de53e41bddc1502c146eb11abd74"}, + {file = "lxml-5.2.0-cp311-cp311-win32.whl", hash = "sha256:0947d1114e337dc2aae2fa14bbc9ed5d9ca1a0acd6d2f948df9926aef65305e9"}, + {file = "lxml-5.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:1eace37a9f4a1bef0bb5c849434933fd6213008ec583c8e31ee5b8e99c7c8500"}, + {file = "lxml-5.2.0-cp312-cp312-macosx_10_9_universal2.whl", hash = 
"sha256:f2cb157e279d28c66b1c27e0948687dc31dc47d1ab10ce0cd292a8334b7de3d5"}, + {file = "lxml-5.2.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:53c0e56f41ef68c1ce4e96f27ecdc2df389730391a2fd45439eb3facb02d36c8"}, + {file = "lxml-5.2.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:703d60e59ab45c17485c2c14b11880e4f7f0eab07134afa9007573fa5a779a5a"}, + {file = "lxml-5.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eaf5e308a5e50bc0548c4fdca0117a31ec9596f8cfc96592db170bcecc71a957"}, + {file = "lxml-5.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af64df85fecd3cf3b2e792f0b5b4d92740905adfa8ce3b24977a55415f1a0c40"}, + {file = "lxml-5.2.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:df7dfbdef11702fd22c2eaf042d7098d17edbc62d73f2199386ad06cbe466f6d"}, + {file = "lxml-5.2.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:7250030a7835bfd5ba6ca7d1ad483ec90f9cbc29978c5e75c1cc3e031d3c4160"}, + {file = "lxml-5.2.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:be5faa2d5c8c8294d770cfd09d119fb27b5589acc59635b0cf90f145dbe81dca"}, + {file = "lxml-5.2.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:347ec08250d5950f5b016caa3e2e13fb2cb9714fe6041d52e3716fb33c208663"}, + {file = "lxml-5.2.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:dc7b630c4fb428b8a40ddd0bfc4bc19de11bb3c9b031154f77360e48fe8b4451"}, + {file = "lxml-5.2.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ae550cbd7f229cdf2841d9b01406bcca379a5fb327b9efb53ba620a10452e835"}, + {file = "lxml-5.2.0-cp312-cp312-win32.whl", hash = "sha256:7c61ce3cdd6e6c9f4003ac118be7eb3036d0ce2afdf23929e533e54482780f74"}, + {file = "lxml-5.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:f90c36ca95a44d2636bbf55a51ca30583b59b71b6547b88d954e029598043551"}, + {file = "lxml-5.2.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = 
"sha256:1cce2eaad7e38b985b0f91f18468dda0d6b91862d32bec945b0e46e2ffe7222e"}, + {file = "lxml-5.2.0-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:60a3983d32f722a8422c01e4dc4badc7a307ca55c59e2485d0e14244a52c482f"}, + {file = "lxml-5.2.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:60847dfbdfddf08a56c4eefe48234e8c1ab756c7eda4a2a7c1042666a5516564"}, + {file = "lxml-5.2.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bbe335f0d1a86391671d975a1b5e9b08bb72fba6b567c43bdc2e55ca6e6c086"}, + {file = "lxml-5.2.0-cp36-cp36m-manylinux_2_28_aarch64.whl", hash = "sha256:3ac7c8a60b8ad51fe7bca99a634dd625d66492c502fd548dc6dc769ce7d94b6a"}, + {file = "lxml-5.2.0-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:73e69762cf740ac3ae81137ef9d6f15f93095f50854e233d50b29e7b8a91dbc6"}, + {file = "lxml-5.2.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:281ee1ffeb0ab06204dfcd22a90e9003f0bb2dab04101ad983d0b1773bc10588"}, + {file = "lxml-5.2.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:ba3a86b0d5a5c93104cb899dff291e3ae13729c389725a876d00ef9696de5425"}, + {file = "lxml-5.2.0-cp36-cp36m-musllinux_1_2_aarch64.whl", hash = "sha256:356f8873b1e27b81793e30144229adf70f6d3e36e5cb7b6d289da690f4398953"}, + {file = "lxml-5.2.0-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:2a34e74ffe92c413f197ff4967fb1611d938ee0691b762d062ef0f73814f3aa4"}, + {file = "lxml-5.2.0-cp36-cp36m-win32.whl", hash = "sha256:6f0d2b97a5a06c00c963d4542793f3e486b1ed3a957f8c19f6006ed39d104bb0"}, + {file = "lxml-5.2.0-cp36-cp36m-win_amd64.whl", hash = "sha256:35e39c6fd089ad6674eb52d93aa874d6027b3ae44d2381cca6e9e4c2e102c9c8"}, + {file = "lxml-5.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5f6e4e5a62114ae76690c4a04c5108d067442d0a41fd092e8abd25af1288c450"}, + {file = "lxml-5.2.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:93eede9bcc842f891b2267c7f0984d811940d1bc18472898a1187fe560907a99"}, + {file = "lxml-5.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ad364026c2cebacd7e01d1138bd53639822fefa8f7da90fc38cd0e6319a2699"}, + {file = "lxml-5.2.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f06e4460e76468d99cc36d5b9bc6fc5f43e6662af44960e13e3f4e040aacb35"}, + {file = "lxml-5.2.0-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:ca3236f31d565555139d5b00b790ed2a98ac6f0c4470c4032f8b5e5a5dba3c1a"}, + {file = "lxml-5.2.0-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:a9b67b850ab1d304cb706cf71814b0e0c3875287083d7ec55ee69504a9c48180"}, + {file = "lxml-5.2.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:5261c858c390ae9a19aba96796948b6a2d56649cbd572968970dc8da2b2b2a42"}, + {file = "lxml-5.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e8359fb610c8c444ac473cfd82dae465f405ff807cabb98a9b9712bbd0028751"}, + {file = "lxml-5.2.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:f9e27841cddfaebc4e3ffbe5dbdff42891051acf5befc9f5323944b2c61cef16"}, + {file = "lxml-5.2.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:641a8da145aca67671205f3e89bfec9815138cf2fe06653c909eab42e486d373"}, + {file = "lxml-5.2.0-cp37-cp37m-win32.whl", hash = "sha256:931a3a13e0f574abce8f3152b207938a54304ccf7a6fd7dff1fdb2f6691d08af"}, + {file = "lxml-5.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:246c93e2503c710cf02c7e9869dc0258223cbefe5e8f9ecded0ac0aa07fd2bf8"}, + {file = "lxml-5.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:11acfcdf5a38cf89c48662123a5d02ae0a7d99142c7ee14ad90de5c96a9b6f06"}, + {file = "lxml-5.2.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:200f70b5d95fc79eb9ed7f8c4888eef4e274b9bf380b829d3d52e9ed962e9231"}, + {file = "lxml-5.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:ba4d02aed47c25be6775a40d55c5774327fdedba79871b7c2485e80e45750cb2"}, + {file = "lxml-5.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e283b24c14361fe9e04026a1d06c924450415491b83089951d469509900d9f32"}, + {file = "lxml-5.2.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:03e3962d6ad13a862dacd5b3a3ea60b4d092a550f36465234b8639311fd60989"}, + {file = "lxml-5.2.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:6e45fd5213e5587a610b7e7c8c5319a77591ab21ead42df46bb342e21bc1418d"}, + {file = "lxml-5.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:27877732946843f4b6bfc56eb40d865653eef34ad2edeed16b015d5c29c248df"}, + {file = "lxml-5.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4d16b44ad0dd8c948129639e34c8d301ad87ebc852568ace6fe9a5ad9ce67ee1"}, + {file = "lxml-5.2.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:b8f842df9ba26135c5414e93214e04fe0af259bb4f96a32f756f89467f7f3b45"}, + {file = "lxml-5.2.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c74e77df9e36c8c91157853e6cd400f6f9ca7a803ba89981bfe3f3fc7e5651ef"}, + {file = "lxml-5.2.0-cp38-cp38-win32.whl", hash = "sha256:1459a998c10a99711ac532abe5cc24ba354e4396dafef741c7797f8830712d56"}, + {file = "lxml-5.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:a00f5931b7cccea775123c3c0a2513aee58afdad8728550cc970bff32280bdd2"}, + {file = "lxml-5.2.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ddda5ba8831f258ac7e6364be03cb27aa62f50c67fd94bc1c3b6247959cc0369"}, + {file = "lxml-5.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:56835b9e9a7767202fae06310c6b67478963e535fe185bed3bf9af5b18d2b67e"}, + {file = "lxml-5.2.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:25fef8794f0dc89f01bdd02df6a7fec4bcb2fbbe661d571e898167a83480185e"}, + {file = "lxml-5.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:32d44af078485c4da9a7ec460162392d49d996caf89516fa0b75ad0838047122"}, + {file = "lxml-5.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f354d62345acdf22aa3e171bd9723790324a66fafe61bfe3873b86724cf6daaa"}, + {file = "lxml-5.2.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:6a7e0935f05e1cf1a3aa1d49a87505773b04f128660eac2a24a5594ea6b1baa7"}, + {file = "lxml-5.2.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:75a4117b43694c72a0d89f6c18a28dc57407bde4650927d4ef5fd384bdf6dcc7"}, + {file = "lxml-5.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:57402d6cdd8a897ce21cf8d1ff36683583c17a16322a321184766c89a1980600"}, + {file = "lxml-5.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:56591e477bea531e5e1854f5dfb59309d5708669bc921562a35fd9ca5182bdcd"}, + {file = "lxml-5.2.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:7efbce96719aa275d49ad5357886845561328bf07e1d5ab998f4e3066c5ccf15"}, + {file = "lxml-5.2.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a3c39def0965e8fb5c8d50973e0c7b4ce429a2fa730f3f9068a7f4f9ce78410b"}, + {file = "lxml-5.2.0-cp39-cp39-win32.whl", hash = "sha256:5188f22c00381cb44283ecb28c8d85c2db4a3035774dd851876c8647cb809c27"}, + {file = "lxml-5.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:ed1fe80e1fcdd1205a443bddb1ad3c3135bb1cd3f36cc996a1f4aed35960fbe8"}, + {file = "lxml-5.2.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d2b339fb790fc923ae2e9345c8633e3d0064d37ea7920c027f20c8ae6f65a91f"}, + {file = "lxml-5.2.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:06036d60fccb21e22dd167f6d0e422b9cbdf3588a7e999a33799f9cbf01e41a5"}, + {file = "lxml-5.2.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a1611fb9de0a269c05575c024e6d8cdf2186e3fa52b364e3b03dcad82514d57"}, + {file = "lxml-5.2.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = 
"sha256:05fc3720250d221792b6e0d150afc92d20cb10c9cdaa8c8f93c2a00fbdd16015"}, + {file = "lxml-5.2.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:11e41ffd3cd27b0ca1c76073b27bd860f96431d9b70f383990f1827ca19f2f52"}, + {file = "lxml-5.2.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:0382e6a3eefa3f6699b14fa77c2eb32af2ada261b75120eaf4fc028a20394975"}, + {file = "lxml-5.2.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:be5c8e776ecbcf8c1bce71a7d90e3a3680c9ceae516cac0be08b47e9fac0ca43"}, + {file = "lxml-5.2.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da12b4efc93d53068888cb3b58e355b31839f2428b8f13654bd25d68b201c240"}, + {file = "lxml-5.2.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f46f8033da364bacc74aca5e319509a20bb711c8a133680ca5f35020f9eaf025"}, + {file = "lxml-5.2.0-pp37-pypy37_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:50a26f68d090594477df8572babac64575cd5c07373f7a8319c527c8e56c0f99"}, + {file = "lxml-5.2.0-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:57cbadf028727705086047994d2e50124650e63ce5a035b0aa79ab50f001989f"}, + {file = "lxml-5.2.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:8aa11638902ac23f944f16ce45c9f04c9d5d57bb2da66822abb721f4efe5fdbb"}, + {file = "lxml-5.2.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b7150e630b879390e02121e71ceb1807f682b88342e2ea2082e2c8716cf8bd93"}, + {file = "lxml-5.2.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4add722393c99da4d51c8d9f3e1ddf435b30677f2d9ba9aeaa656f23c1b7b580"}, + {file = "lxml-5.2.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd0f25a431cd16f70ec1c47c10b413e7ddfe1ccaaddd1a7abd181e507c012374"}, + {file = "lxml-5.2.0-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:883e382695f346c2ea3ad96bdbdf4ca531788fbeedb4352be3a8fcd169fc387d"}, + {file = 
"lxml-5.2.0-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:80cc2b55bb6e35d3cb40936b658837eb131e9f16357241cd9ba106ae1e9c5ecb"}, + {file = "lxml-5.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:59ec2948385336e9901008fdf765780fe30f03e7fdba8090aafdbe5d1b7ea0cd"}, + {file = "lxml-5.2.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ddbea6e58cce1a640d9d65947f1e259423fc201c9cf9761782f355f53b7f3097"}, + {file = "lxml-5.2.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52d6cdea438eb7282c41c5ac00bd6d47d14bebb6e8a8d2a1c168ed9e0cacfbab"}, + {file = "lxml-5.2.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c556bbf88a8b667c849d326dd4dd9c6290ede5a33383ffc12b0ed17777f909d"}, + {file = "lxml-5.2.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:947fa8bf15d1c62c6db36c6ede9389cac54f59af27010251747f05bddc227745"}, + {file = "lxml-5.2.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e6cb8f7a332eaa2d876b649a748a445a38522e12f2168e5e838d1505a91cdbb7"}, + {file = "lxml-5.2.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:16e65223f34fd3d65259b174f0f75a4bb3d9893698e5e7d01e54cd8c5eb98d85"}, + {file = "lxml-5.2.0.tar.gz", hash = "sha256:21dc490cdb33047bc7f7ad76384f3366fa8f5146b86cc04c4af45de901393b90"}, ] [package.extras] cssselect = ["cssselect (>=0.7)"] +html-clean = ["lxml-html-clean"] html5 = ["html5lib"] htmlsoup = ["BeautifulSoup4"] -source = ["Cython (>=3.0.7)"] +source = ["Cython (>=3.0.10)"] [[package]] name = "markdown" @@ -2276,18 +2313,18 @@ typing-extensions = ">=4.3.0" [[package]] name = "nautobot" -version = "2.2.0b1" +version = "2.2.0" description = "Source of truth and network automation platform." 
optional = false python-versions = "<3.12,>=3.8" files = [ - {file = "nautobot-2.2.0b1-py3-none-any.whl", hash = "sha256:cf91cf2dbc880a2491b45e943d6fae6b546dbbb2836c311e029131d177363220"}, - {file = "nautobot-2.2.0b1.tar.gz", hash = "sha256:ba5eca68e882af7e364d8fd3194e83d500f4bf668c5bde216c86ec5a4d11bd82"}, + {file = "nautobot-2.2.0-py3-none-any.whl", hash = "sha256:918881373371661ee4fefb3177b8f28a86068164085b1383cc84966f913eca46"}, + {file = "nautobot-2.2.0.tar.gz", hash = "sha256:2232f8296d0b78885e02ab055d0b15e3a6303f633a0d0952c84c76f5978f9b4f"}, ] [package.dependencies] celery = ">=5.3.1,<5.4.0" -Django = ">=3.2.24,<3.3.0" +Django = ">=3.2.25,<3.3.0" django-ajax-tables = ">=1.1.1,<1.2.0" django-celery-beat = ">=2.5.0,<2.6.0" django-celery-results = ">=2.4.0,<2.5.0" @@ -2331,12 +2368,12 @@ social-auth-app-django = ">=5.2.0,<5.3.0" svgwrite = ">=1.4.2,<1.5.0" [package.extras] -all = ["django-auth-ldap (>=4.3.0,<4.4.0)", "django-storages (>=1.13.2,<1.14.0)", "mysqlclient (>=2.2.3,<2.3.0)", "napalm (>=4.1.0,<4.2.0)", "social-auth-core[openidconnect,saml] (>=4.4.2,<4.5.0)"] +all = ["django-auth-ldap (>=4.3.0,<4.4.0)", "django-storages (>=1.13.2,<1.14.0)", "mysqlclient (>=2.2.3,<2.3.0)", "napalm (>=4.1.0,<4.2.0)", "social-auth-core[openidconnect,saml] (>=4.5.3,<4.6.0)"] ldap = ["django-auth-ldap (>=4.3.0,<4.4.0)"] mysql = ["mysqlclient (>=2.2.3,<2.3.0)"] napalm = ["napalm (>=4.1.0,<4.2.0)"] remote-storage = ["django-storages (>=1.13.2,<1.14.0)"] -sso = ["social-auth-core[openidconnect,saml] (>=4.4.2,<4.5.0)"] +sso = ["social-auth-core[openidconnect,saml] (>=4.5.3,<4.6.0)"] [[package]] name = "nautobot-plugin-nornir" @@ -2983,13 +3020,13 @@ files = [ [[package]] name = "pycparser" -version = "2.21" +version = "2.22" description = "C parser in Python" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=3.8" files = [ - {file = "pycparser-2.21-py2.py3-none-any.whl", hash = 
"sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, - {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, + {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, + {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, ] [[package]] diff --git a/pyproject.toml b/pyproject.toml index ec19a8a1..3eece028 100755 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "nautobot-device-onboarding" -version = "3.0.2a10" +version = "3.0.2a11" description = "A app for Nautobot to easily onboard new devices." authors = ["Network to Code, LLC "] license = "Apache-2.0" From 1131e1066277c9cebc5e12a82acf558bafd9c30e Mon Sep 17 00:00:00 2001 From: Susan Hooks Date: Mon, 1 Apr 2024 19:03:02 +0000 Subject: [PATCH 181/225] black --- nautobot_device_onboarding/utils/formatter.py | 24 ++++++++++--------- 1 file changed, 13 insertions(+), 11 deletions(-) diff --git a/nautobot_device_onboarding/utils/formatter.py b/nautobot_device_onboarding/utils/formatter.py index b37d53fb..6d1914a7 100755 --- a/nautobot_device_onboarding/utils/formatter.py +++ b/nautobot_device_onboarding/utils/formatter.py @@ -136,8 +136,6 @@ def format_ios_results(device): mac_list = device.get("mac_address", []) description_list = device.get("description", []) link_status_list = device.get("link_status", []) - #vrf_list = device.get("vrf", []) - #print(f"vrf_list: {vrf_list}") interface_dict = {} for item in mtu_list: @@ -156,7 +154,9 @@ def format_ios_results(device): for item in description_list: interface_dict.setdefault(item["interface"], {})["description"] = item["description"] for item in link_status_list: - interface_dict.setdefault(item["interface"], {})["link_status"] = True if item["link_status"] == "up" else False + interface_dict.setdefault(item["interface"], 
{})["link_status"] = ( + True if item["link_status"] == "up" else False + ) for interface in interface_dict.values(): interface.setdefault("802.1Q_mode", "") interface.setdefault("lag", "") @@ -186,8 +186,8 @@ def format_ios_results(device): except KeyError: pass - except Error as e: - device = { "failed": True, "failed_reason": f"Formatting error for device {device}"} + except Exception: + device = {"failed": True, "failed_reason": f"Formatting error for device {device}"} return device @@ -221,7 +221,9 @@ def format_nxos_results(device): for item in description_list: interface_dict.setdefault(item["interface"], {})["description"] = item["description"] for item in link_status_list: - interface_dict.setdefault(item["interface"], {})["link_status"] = True if item["link_status"] == "up" else False + interface_dict.setdefault(item["interface"], {})["link_status"] = ( + True if item["link_status"] == "up" else False + ) for item in mode_list: interface_dict.setdefault(item["interface"], {})["802.1Q_mode"] = ( "access" if item["mode"] == "access" else "tagged" if item["mode"] == "trunk" else "" @@ -256,10 +258,10 @@ def format_nxos_results(device): del device["mode"] except KeyError: pass - except Error as e: - device = { "failed": True, "failed_reason": f"Formatting error for device {device}"} + except Exception: + device = {"failed": True, "failed_reason": f"Formatting error for device {device}"} return device - + return device @@ -287,8 +289,8 @@ def format_results(compiled_results): if platform in ["cisco_ios", "cisco_xe"]: format_ios_results(data) elif platform == "cisco_nxos": - format_nxos_results(data) + format_nxos_results(data) else: data.update({"failed": True, "failed_reason": "Cannot connect to device."}) - + return compiled_results From feca83d82c707493ff9da74623dfacc9d0b322cf Mon Sep 17 00:00:00 2001 From: Susan Hooks Date: Mon, 1 Apr 2024 19:14:48 +0000 Subject: [PATCH 182/225] yamlint --- .../command_mappers/cisco_ios.yml | 29 ------------------- 
.../command_mappers/juniper_junos.yml | 2 +- .../nornir_plays/processor.py | 4 ++- 3 files changed, 4 insertions(+), 31 deletions(-) diff --git a/nautobot_device_onboarding/command_mappers/cisco_ios.yml b/nautobot_device_onboarding/command_mappers/cisco_ios.yml index 84ddb8ef..e9e368b5 100755 --- a/nautobot_device_onboarding/command_mappers/cisco_ios.yml +++ b/nautobot_device_onboarding/command_mappers/cisco_ios.yml @@ -75,32 +75,3 @@ network_importer: - command: "show interfaces" use_textfsm: true jpath: "[*].{interface: interface, link_status: link_status}" - # mode: - # commands: - # - command: "show interfaces switchport" - # use_textfsm: true - # jpath: "[*].{interface: interface, mode: admin_mode}" - # dot1q_mode: - # commands: - # # - command: "show interfaces switchport" - # # use_textfsm: true - # # jpath: "[*].mode" - # - command: "show interfaces switchport" - # use_textfsm: true - # jpath: "[*].{interface: interface, dot1q_mode: admin_mode}" - # validator_pattern: "not None" - # # lag: - # # commands: - # # - command: "show etherchannel summary" - # # use_textfsm: true - # # jpath: "[*].protocol" - # # untagged_vlan: - # # commands: - # # - command: "show vlans" - # # use_textfsm: true - # # jpath: "[*].vlan_id" - # # tagged_vlans: - # # commands: - # # - command: "show vlans" - # # use_textfsm: true - # # jpath: "[*].vlan_id" diff --git a/nautobot_device_onboarding/command_mappers/juniper_junos.yml b/nautobot_device_onboarding/command_mappers/juniper_junos.yml index 8543321a..04f62d63 100755 --- a/nautobot_device_onboarding/command_mappers/juniper_junos.yml +++ b/nautobot_device_onboarding/command_mappers/juniper_junos.yml @@ -36,4 +36,4 @@ network_importer: commands: - command: "show version | display json" use_textfsm: false - jpath: '"software-information"[]."host-name"[].data' # yamllint disable-line rule:quoted-strings \ No newline at end of file + jpath: '"software-information"[]."host-name"[].data' # yamllint disable-line rule:quoted-strings 
diff --git a/nautobot_device_onboarding/nornir_plays/processor.py b/nautobot_device_onboarding/nornir_plays/processor.py index 03617763..63792d59 100755 --- a/nautobot_device_onboarding/nornir_plays/processor.py +++ b/nautobot_device_onboarding/nornir_plays/processor.py @@ -82,7 +82,9 @@ def subtask_instance_completed(self, task: Task, host: Host, result: MultiResult def subtask_instance_started(self, task: Task, host: Host) -> None: # show command start """Processor for logging and data processing on subtask start.""" - self.logger.info(f"subtask_instance_started Subtask starting {task.name}, {task.host}.", extra={"object": task.host}) + self.logger.info( + f"subtask_instance_started Subtask starting {task.name}, {task.host}.", extra={"object": task.host} + ) if not self.data.get(host.name): self.data[host.name] = { "platform": host.platform, From ba39479a8d17648cf018ff1b6ef0f7a5092edd7f Mon Sep 17 00:00:00 2001 From: Susan Hooks Date: Tue, 2 Apr 2024 19:12:25 +0000 Subject: [PATCH 183/225] updates for vrf --- .../command_mappers/cisco_ios.yml | 5 ++ .../diffsync/mock_data.py | 9 +++ nautobot_device_onboarding/utils/formatter.py | 55 ++++++++++++------- 3 files changed, 48 insertions(+), 21 deletions(-) diff --git a/nautobot_device_onboarding/command_mappers/cisco_ios.yml b/nautobot_device_onboarding/command_mappers/cisco_ios.yml index e9e368b5..4a550896 100755 --- a/nautobot_device_onboarding/command_mappers/cisco_ios.yml +++ b/nautobot_device_onboarding/command_mappers/cisco_ios.yml @@ -75,3 +75,8 @@ network_importer: - command: "show interfaces" use_textfsm: true jpath: "[*].{interface: interface, link_status: link_status}" + vrfs: + commands: + - command: "show vrf" + use_textfsm: true + jpath: "[*].{name: name, default_rd: default_rd, interfaces: interfaces}" diff --git a/nautobot_device_onboarding/diffsync/mock_data.py b/nautobot_device_onboarding/diffsync/mock_data.py index b337c92e..5f1aa100 100644 --- a/nautobot_device_onboarding/diffsync/mock_data.py +++ 
b/nautobot_device_onboarding/diffsync/mock_data.py @@ -20,6 +20,7 @@ "lag": "", "untagged_vlan": {"name": "vlan60", "id": "60"}, "tagged_vlans": [{"name": "vlan40", "id": "40"}], + "vrf": {"name": "vrf1", "rd": "65000:1"}, } }, { @@ -36,6 +37,7 @@ "lag": "Po2", "untagged_vlan": {}, "tagged_vlans": [], + "vrf": {}, } }, { @@ -53,6 +55,7 @@ "lag": "Po1", "untagged_vlan": {}, "tagged_vlans": [{"name": "vlan40", "id": "40"}, {"name": "vlan50", "id": "50"}], + "vrf": {"name": "mgmt", "rd": "65500:1"}, } }, { @@ -83,6 +86,7 @@ "lag": "", "untagged_vlan": {}, "tagged_vlans": [], + "vrf": {"name": "mgmt", "rd": "65500:1"}, } }, { @@ -97,6 +101,7 @@ "lag": "", "untagged_vlan": "", "tagged_vlans": [], + "vrf": {"name": "mgmt", "rd": "65500:1"}, } }, ], @@ -118,6 +123,7 @@ "lag": "", "untagged_vlan": {"name": "vlan60", "id": "60"}, "tagged_vlans": [{"name": "vlan40", "id": "40"}], + "vrf": {"name": "mgmt", "rd": "65500:1"}, } }, { @@ -134,6 +140,7 @@ "lag": "Po1", "untagged_vlan": {}, "tagged_vlans": [], + "vrf": {}, } }, { @@ -151,6 +158,7 @@ "lag": "Po1", "untagged_vlan": {}, "tagged_vlans": [{"name": "vlan40", "id": "40"}, {"name": "vlan50", "id": "50"}], + "vrf": {"name": "mgmt", "rd": "65500:1"}, } }, { @@ -165,6 +173,7 @@ "lag": "", "untagged_vlan": {}, "tagged_vlans": [], + "vrf": {"name": "mgmt", "rd": "65500:1"}, } }, ], diff --git a/nautobot_device_onboarding/utils/formatter.py b/nautobot_device_onboarding/utils/formatter.py index 6d1914a7..6539b064 100755 --- a/nautobot_device_onboarding/utils/formatter.py +++ b/nautobot_device_onboarding/utils/formatter.py @@ -9,6 +9,7 @@ from jdiff import extract_data_from_json from jinja2.sandbox import SandboxedEnvironment from netutils.interface import canonical_interface_name +from nautobot.dcim.models import Device from nautobot_device_onboarding.constants import INTERFACE_TYPE_MAP_STATIC from nautobot_device_onboarding.utils.jinja_filters import fix_interfaces @@ -74,10 +75,10 @@ def perform_data_extraction(host, 
dict_field, command_info_dict, j2_env, task_re print(f"extracted 1: {extracted_processed}") else: extracted_processed = extracted_value - print(f"extracted 2: {extracted_processed}") - if isinstance(extracted_value, list) and len(extracted_value) == 1: - extracted_processed = extracted_value[0] - print(f"extracted 3: {extracted_processed}") + # print(f"extracted 2: {extracted_processed}") + # if isinstance(extracted_value, list) and len(extracted_value) == 1: + # extracted_processed = extracted_value[0] + # print(f"extracted 3: {extracted_processed}") if command_info_dict.get("validator_pattern"): # temp validator if command_info_dict["validator_pattern"] == "not None": @@ -136,6 +137,7 @@ def format_ios_results(device): mac_list = device.get("mac_address", []) description_list = device.get("description", []) link_status_list = device.get("link_status", []) + vrf_list = device.get("vrfs", []) interface_dict = {} for item in mtu_list: @@ -157,6 +159,11 @@ def format_ios_results(device): interface_dict.setdefault(item["interface"], {})["link_status"] = ( True if item["link_status"] == "up" else False ) + for vrf in vrf_list: + for interface in vrf["interfaces"]: + canonical_name = canonical_interface_name(interface) + interface_dict.setdefault(canonical_name, {}) + interface_dict[canonical_name]["vrf"] = {"name": vrf["name"], "rd": vrf["default_rd"]} for interface in interface_dict.values(): interface.setdefault("802.1Q_mode", "") interface.setdefault("lag", "") @@ -183,9 +190,10 @@ def format_ios_results(device): del device["mac_address"] del device["description"] del device["link_status"] + del device["vrfs"] except KeyError: - pass + device = {"failed": True, "failed_reason": f"Formatting error for device {device}"} except Exception: device = {"failed": True, "failed_reason": f"Formatting error for device {device}"} return device @@ -257,11 +265,9 @@ def format_nxos_results(device): del device["link_status"] del device["mode"] except KeyError: - pass + device = 
{"failed": True, "failed_reason": f"Formatting error for device {device}"} except Exception: device = {"failed": True, "failed_reason": f"Formatting error for device {device}"} - return device - return device @@ -280,17 +286,24 @@ def format_results(compiled_results): compiled_results (dict): The formatted results. """ for device, data in compiled_results.items(): - if "platform" in data: - platform = data.get("platform") - if platform not in ["cisco_ios", "cisco_xe", "cisco_nxos"]: - print(f"Unsupported platform {platform}") - data.update({"failed": True, "failed_reason": f"Unsupported platform {platform}"}) - if "type" in data: - if platform in ["cisco_ios", "cisco_xe"]: - format_ios_results(data) - elif platform == "cisco_nxos": - format_nxos_results(data) - else: - data.update({"failed": True, "failed_reason": "Cannot connect to device."}) - + try: + if "platform" in data: + platform = data.get("platform") + if platform not in ["cisco_ios", "cisco_xe", "cisco_nxos"]: + data.update({"failed": True, "failed_reason": f"Unsupported platform {platform}"}) + if "type" in data: + + serial = Device.objects.get(name=device).serial + if serial == "": + data.update({"failed": True, "failed_reason": "Serial not found for device in Nautobot."}) + else: + data["serial"] = serial + if platform in ["cisco_ios", "cisco_xe"]: + format_ios_results(data) + elif platform == "cisco_nxos": + format_nxos_results(data) + else: + data.update({"failed": True, "failed_reason": "Cannot connect to device."}) + except Exception as e: + data.update({"failed": True, "failed_reason": f"Error formatting device: {e}"}) return compiled_results From 2d95fe96e713f450dd449d03992642a46aef47b5 Mon Sep 17 00:00:00 2001 From: Jeff Kala Date: Thu, 4 Apr 2024 10:42:41 -0500 Subject: [PATCH 184/225] revamps and cleanups --- development/nautobot_config.py | 7 +- .../command_mappers/cisco_nxos.yml | 6 +- nautobot_device_onboarding/constants.py | 9 +++ nautobot_device_onboarding/datasources.py | 24 +++++++ 
nautobot_device_onboarding/helpers.py | 40 ----------- nautobot_device_onboarding/jobs.py | 2 +- .../nornir_plays/command_getter.py | 5 +- .../nornir_plays/empty_inventory.py | 2 +- .../inventory_creator.py | 9 +-- nautobot_device_onboarding/utils/formatter.py | 37 +++++----- nautobot_device_onboarding/utils/helper.py | 70 ++++++++++++++++--- 11 files changed, 130 insertions(+), 81 deletions(-) create mode 100755 nautobot_device_onboarding/datasources.py delete mode 100644 nautobot_device_onboarding/helpers.py rename nautobot_device_onboarding/{utils => nornir_plays}/inventory_creator.py (86%) diff --git a/development/nautobot_config.py b/development/nautobot_config.py index 9456005a..677f04cf 100644 --- a/development/nautobot_config.py +++ b/development/nautobot_config.py @@ -136,7 +136,12 @@ # Apps configuration settings. These settings are used by various Apps that the user may have installed. # Each key in the dictionary is the name of an installed App and its value is a dictionary of settings. 
PLUGINS_CONFIG = { - "nautobot_device_onboarding": {}, + "nautobot_device_onboarding": { + "custom_post_processing_filters": { + "example_filter_one": "parts.jeff.jeff_interfaces", + # "example_filter_two": "my_package.my_filters.filtertwo" + } + }, "nautobot_ssot": { "hide_example_jobs": is_truthy(os.getenv("NAUTOBOT_SSOT_HIDE_EXAMPLE_JOBS")), }, diff --git a/nautobot_device_onboarding/command_mappers/cisco_nxos.yml b/nautobot_device_onboarding/command_mappers/cisco_nxos.yml index 92a648b1..d02b4a9f 100755 --- a/nautobot_device_onboarding/command_mappers/cisco_nxos.yml +++ b/nautobot_device_onboarding/command_mappers/cisco_nxos.yml @@ -2,9 +2,9 @@ device_onboarding: hostname: commands: - - command: "show version" - use_textfsm: true - jpath: "[*].hostname" + command: "show version" + use_textfsm: true + jpath: "[*].hostname" serial: commands: - command: "show inventory" diff --git a/nautobot_device_onboarding/constants.py b/nautobot_device_onboarding/constants.py index 5f9e308f..16853853 100644 --- a/nautobot_device_onboarding/constants.py +++ b/nautobot_device_onboarding/constants.py @@ -1,5 +1,13 @@ """Constants for nautobot_device_onboarding app.""" +import os +from django.conf import settings + +PLUGIN_CFG = settings.PLUGINS_CONFIG["nautobot_device_onboarding"] + +# DATA_DIR = os.path.abspath(os.path.join(os.path.dirname(os.path.dirname(__file__)), "command_mappers")) + +# This should be removed and network_driver mapping should be used instead. NETMIKO_TO_NAPALM_STATIC = { "cisco_ios": "ios", "cisco_xe": "ios", @@ -9,6 +17,7 @@ "cisco_xr": "iosxr", } +# This should potentially be removed and used nautobot core directly choices. 
INTERFACE_TYPE_MAP_STATIC = { "Gigabit Ethernet": "1000base-t", "Ten Gigabit Ethernet": "10gbase-t", diff --git a/nautobot_device_onboarding/datasources.py b/nautobot_device_onboarding/datasources.py new file mode 100755 index 00000000..5acda3fd --- /dev/null +++ b/nautobot_device_onboarding/datasources.py @@ -0,0 +1,24 @@ +"""Datasources to override command_mapper yaml files.""" +from nautobot.extras.choices import LogLevelChoices +from nautobot.apps.datasources import DatasourceContent + + +def refresh_git_command_mappers(repository_record, job_result, delete=False): # pylint: disable=unused-argument + """Callback for gitrepository updates on Onboarding Command Mapper Repo.""" + job_result.log( + "Successfully Pulled Command Mapper Repo", + level_choice=LogLevelChoices.LOG_DEBUG, + ) + + +datasource_contents = [ + ( + 'extras.gitrepository', + DatasourceContent( + name='Onboarding Command Mappers', + content_identifier='nautobot_device_onboarding.onboarding_command_mappers', + icon='mdi-paw', + callback=refresh_git_command_mappers, + ) + ) +] diff --git a/nautobot_device_onboarding/helpers.py b/nautobot_device_onboarding/helpers.py deleted file mode 100644 index d0d7bd9c..00000000 --- a/nautobot_device_onboarding/helpers.py +++ /dev/null @@ -1,40 +0,0 @@ -"""OnboardingTask Django model.""" - -import socket - -import netaddr -from netaddr.core import AddrFormatError - -from nautobot_device_onboarding.exceptions import OnboardException - - -def onboarding_task_fqdn_to_ip(address): - """Method to assure OT has FQDN resolved to IP address and rewritten into OT. - - If it is a DNS name, attempt to resolve the DNS address and assign the IP address to the - name. 
- - Returns: - None - - Raises: - OnboardException("fail-general"): - When a prefix was entered for an IP address - OnboardException("fail-dns"): - When a Name lookup via DNS fails to resolve an IP address - """ - try: - # If successful, this is an IP address and can pass - netaddr.IPAddress(address) - return address - # Raise an Exception for Prefix values - except ValueError as err: - raise OnboardException(f"fail-general - ERROR appears a prefix was entered: {address}") from err - # An AddrFormatError exception means that there is not an IP address in the field, and should continue on - except AddrFormatError: - try: - # Perform DNS Lookup - return socket.gethostbyname(address) - except socket.gaierror as err: - # DNS Lookup has failed, Raise an exception for unable to complete DNS lookup - raise OnboardException(f"fail-dns - ERROR failed to complete DNS lookup: {address}") from err diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py index 54afe120..7cb08d4e 100755 --- a/nautobot_device_onboarding/jobs.py +++ b/nautobot_device_onboarding/jobs.py @@ -25,7 +25,7 @@ OnboardingNetworkAdapter, ) from nautobot_device_onboarding.exceptions import OnboardException -from nautobot_device_onboarding.helpers import onboarding_task_fqdn_to_ip +from nautobot_device_onboarding.utils.helper import onboarding_task_fqdn_to_ip from nautobot_device_onboarding.netdev_keeper import NetdevKeeper from nautobot_device_onboarding.nornir_plays.command_getter import command_getter_do, command_getter_ni diff --git a/nautobot_device_onboarding/nornir_plays/command_getter.py b/nautobot_device_onboarding/nornir_plays/command_getter.py index f4653a25..4d8dffb4 100755 --- a/nautobot_device_onboarding/nornir_plays/command_getter.py +++ b/nautobot_device_onboarding/nornir_plays/command_getter.py @@ -17,9 +17,9 @@ from nautobot_device_onboarding.nornir_plays.empty_inventory import EmptyInventory from nautobot_device_onboarding.nornir_plays.logger import NornirLogger 
from nautobot_device_onboarding.nornir_plays.processor import ProcessorDO +from nautobot_device_onboarding.nornir_plays.inventory_creator import _set_inventory from nautobot_device_onboarding.utils.formatter import format_results from nautobot_device_onboarding.utils.helper import add_platform_parsing_info -from nautobot_device_onboarding.utils.inventory_creator import _set_inventory InventoryPluginRegister.register("nautobot-inventory", NautobotORMInventory) InventoryPluginRegister.register("empty-inventory", EmptyInventory) @@ -141,6 +141,8 @@ def command_getter_do(job_result, log_level, kwargs): logging={"enabled": False}, inventory={ "plugin": "empty-inventory", + # Can't use this since we're dynamically generating inventory on demand. + # "transform_function": "transform_to_add_command_parser_info", }, ) as nornir_obj: nr_with_processors = nornir_obj.with_processors([ProcessorDO(logger, compiled_results, kwargs)]) @@ -176,6 +178,7 @@ def command_getter_do(job_result, log_level, kwargs): else: single_host_inventory_constructed = _set_inventory(entered_ip, platform, port, username, password) nr_with_processors.inventory.hosts.update(single_host_inventory_constructed) + logger.info(nr_with_processors.inventory.hosts) nr_with_processors.run(task=netmiko_send_commands, command_getter_job="device_onboarding") except Exception as err: # pylint: disable=broad-exception-caught logger.error(err) diff --git a/nautobot_device_onboarding/nornir_plays/empty_inventory.py b/nautobot_device_onboarding/nornir_plays/empty_inventory.py index d9531d72..a3ed3efd 100755 --- a/nautobot_device_onboarding/nornir_plays/empty_inventory.py +++ b/nautobot_device_onboarding/nornir_plays/empty_inventory.py @@ -6,7 +6,7 @@ class EmptyInventory: """Creates an empty Nornir inventory.""" - def load(self) -> Inventory: + def load(self): """Create a default empty inventory.""" hosts = Hosts() defaults = Defaults(data={}) diff --git a/nautobot_device_onboarding/utils/inventory_creator.py 
b/nautobot_device_onboarding/nornir_plays/inventory_creator.py similarity index 86% rename from nautobot_device_onboarding/utils/inventory_creator.py rename to nautobot_device_onboarding/nornir_plays/inventory_creator.py index a22f6bbc..4c4e0b14 100755 --- a/nautobot_device_onboarding/utils/inventory_creator.py +++ b/nautobot_device_onboarding/nornir_plays/inventory_creator.py @@ -3,7 +3,7 @@ from netmiko import SSHDetect from nornir.core.inventory import ConnectionOptions, Host -from nautobot_device_onboarding.utils.helper import _get_platform_parsing_info +from nautobot_device_onboarding.utils.helper import add_platform_parsing_info def guess_netmiko_device_type(hostname, username, password, port): @@ -32,15 +32,12 @@ def guess_netmiko_device_type(hostname, username, password, port): def _set_inventory(host_ip, platform, port, username, password): """Construct Nornir Inventory.""" + parsing_info = add_platform_parsing_info(host_ip) inv = {} if platform: platform = platform.network_driver else: platform = guess_netmiko_device_type(host_ip, username, password, port) - if platform: - parsing_info = _get_platform_parsing_info(platform) - else: - parsing_info = {} host = Host( name=host_ip, @@ -58,7 +55,7 @@ def _set_inventory(host_ip, platform, port, username, password): platform=platform, ) }, - data={"platform_parsing_info": parsing_info}, + data={"platform_parsing_info": parsing_info[platform]}, ) inv.update({host_ip: host}) diff --git a/nautobot_device_onboarding/utils/formatter.py b/nautobot_device_onboarding/utils/formatter.py index 6d1914a7..60121565 100755 --- a/nautobot_device_onboarding/utils/formatter.py +++ b/nautobot_device_onboarding/utils/formatter.py @@ -1,20 +1,15 @@ """Formatter.""" import json -import os - -import yaml from django.template import engines from django.utils.module_loading import import_string from jdiff import extract_data_from_json from jinja2.sandbox import SandboxedEnvironment from netutils.interface import 
canonical_interface_name -from nautobot_device_onboarding.constants import INTERFACE_TYPE_MAP_STATIC +from nautobot_device_onboarding.constants import INTERFACE_TYPE_MAP_STATIC, PLUGIN_CFG from nautobot_device_onboarding.utils.jinja_filters import fix_interfaces -DATA_DIR = os.path.abspath(os.path.join(os.path.dirname(os.path.dirname(__file__)), "command_mappers")) - def get_django_env(): """Load Django Jinja filters from the Django jinja template engine, and add them to the jinja_env. @@ -32,22 +27,22 @@ def get_django_env(): j2_env["undefined"] = import_string(j2_env["undefined"]) jinja_env = SandboxedEnvironment(**j2_env) jinja_env.filters = engines["jinja"].env.filters + if PLUGIN_CFG.get("custom_post_processing_filters"): + for filter_name, filter_function in PLUGIN_CFG["custom_post_processing_filters"].items(): + try: + func = import_string(filter_function) + except Exception as error: # pylint: disable=broad-except + msg = ( + "There was an issue attempting to import the custom post_processing filters of" + f" {filter_name} this is expected with a local configuration issue " + "and not related to the Device Onboarding App, please contact your system admin for further details" + ) + raise Exception(msg).with_traceback(error.__traceback__) + jinja_env.filters[filter_name] = func jinja_env.filters["fix_interfaces"] = fix_interfaces return jinja_env -def load_yaml_datafile(filename): - """Get the contents of the given YAML data file. - - Args: - filename (str): Filename within the 'data' directory. 
- """ - file_path = os.path.join(DATA_DIR, filename) - if os.path.isfile(file_path): - with open(file_path, "r", encoding="utf-8") as yaml_file: - data = yaml.safe_load(yaml_file) - return data - def perform_data_extraction(host, dict_field, command_info_dict, j2_env, task_result): """Extract, process data.""" @@ -100,6 +95,8 @@ def extract_show_data(host, multi_result, command_getter_type): multi_result (multiResult): multiresult object from nornir command_getter_type (str): to know what dict to pull, device_onboarding or network_importer. """ + # Think about whether this should become a constant, the env shouldn't change per job execution, but + # perhaps it shouldn't be reused to avoid any memory leak? jinja_env = get_django_env() host_platform = host.platform @@ -110,11 +107,15 @@ def extract_show_data(host, multi_result, command_getter_type): for default_dict_field, command_info in command_jpaths[command_getter_type].items(): if command_info.get("commands"): # Means their isn't any "nested" structures. Therefore not expected to see "validator_pattern key" + if isinstance(command_info["commands"], dict): + command_info["commands"] = [command_info["commands"]] result = perform_data_extraction(host, default_dict_field, command_info, jinja_env, multi_result[0]) final_result_dict.update(result) else: # Means their is a "nested" structures. 
Priority for dict_field, nested_command_info in command_info.items(): + if isinstance(nested_command_info["commands"], dict): + nested_command_info["commands"] = [nested_command_info["commands"]] result = perform_data_extraction(host, dict_field, nested_command_info, jinja_env, multi_result[0]) final_result_dict.update(result) return final_result_dict diff --git a/nautobot_device_onboarding/utils/helper.py b/nautobot_device_onboarding/utils/helper.py index 4156dc91..557a370c 100644 --- a/nautobot_device_onboarding/utils/helper.py +++ b/nautobot_device_onboarding/utils/helper.py @@ -1,10 +1,17 @@ """helper.py.""" - +import os +import socket +import netaddr +import yaml +from netaddr.core import AddrFormatError from nautobot.dcim.filters import DeviceFilterSet from nautobot.dcim.models import Device +from nautobot.extras.models import GitRepository from nornir_nautobot.exceptions import NornirNautobotException -from nautobot_device_onboarding.utils.formatter import load_yaml_datafile +from nautobot_device_onboarding.exceptions import OnboardException + +DATA_DIR = os.path.abspath(os.path.join(os.path.dirname(os.path.dirname(__file__)), "command_mappers")) FIELDS_PK = { "location", @@ -51,15 +58,58 @@ def get_job_filter(data=None): return devices_filtered.qs -def _get_platform_parsing_info(host_platform): - """Open and load yaml file.""" - if host_platform == "cisco_xe": - host_platform = "cisco_ios" - yaml_parsing_info = load_yaml_datafile(f"{host_platform}.yml") - return yaml_parsing_info +def onboarding_task_fqdn_to_ip(address): + """Method to assure OT has FQDN resolved to IP address and rewritten into OT. + + If it is a DNS name, attempt to resolve the DNS address and assign the IP address to the + name. 
+ + Returns: + None + + Raises: + OnboardException("fail-general"): + When a prefix was entered for an IP address + OnboardException("fail-dns"): + When a Name lookup via DNS fails to resolve an IP address + """ + try: + # If successful, this is an IP address and can pass + netaddr.IPAddress(address) + return address + # Raise an Exception for Prefix values + except ValueError as err: + raise OnboardException(f"fail-general - ERROR appears a prefix was entered: {address}") from err + # An AddrFormatError exception means that there is not an IP address in the field, and should continue on + except AddrFormatError: + try: + # Perform DNS Lookup + return socket.gethostbyname(address) + except socket.gaierror as err: + # DNS Lookup has failed, Raise an exception for unable to complete DNS lookup + raise OnboardException(f"fail-dns - ERROR failed to complete DNS lookup: {address}") from err def add_platform_parsing_info(host): """This nornir transform function adds platform parsing info.""" - parsing_info = _get_platform_parsing_info(host.platform) - host.data.update({"platform_parsing_info": parsing_info}) + repository_record = GitRepository.objects.filter(provided_contents=['nautobot_device_onboarding.onboarding_command_mappers']).first() + repo_data_dir = os.path.join(repository_record.filesystem_path, 'onboarding_command_mappers') + command_mapper_defaults = load_command_mappers_from_dir(DATA_DIR) + command_mappers_repo_path = load_command_mappers_from_dir(repo_data_dir) + # parsing_info = _get_default_platform_parsing_info(host.platform) + merged_command_mappers = {**command_mapper_defaults, **command_mappers_repo_path} + # This is so we can reuse this for a non-nornir host object since we don't have it in an empty inventory at this point. 
+ if not isinstance(host, str): + host.data.update({"platform_parsing_info": merged_command_mappers}) + return merged_command_mappers + + +def load_command_mappers_from_dir(command_mappers_path): + """Helper to load all yaml files in directory and return merged dictionary.""" + command_mappers_result = {} + for filename in os.listdir(command_mappers_path): + with open(os.path.join(command_mappers_path, filename), encoding="utf-8") as fd: + network_driver = filename.split('.')[0] + command_mappers_data = yaml.safe_load(fd) + command_mappers_result[network_driver] = command_mappers_data + return command_mappers_result From ed4d534dc9033d8d97fc97ea52bc2fdd0f22ccbb Mon Sep 17 00:00:00 2001 From: Susan Hooks Date: Thu, 4 Apr 2024 20:26:42 +0000 Subject: [PATCH 185/225] update list conversion --- .../command_mappers/cisco_nxos.yml | 10 +++ nautobot_device_onboarding/utils/formatter.py | 88 ++++++++++++++----- 2 files changed, 76 insertions(+), 22 deletions(-) diff --git a/nautobot_device_onboarding/command_mappers/cisco_nxos.yml b/nautobot_device_onboarding/command_mappers/cisco_nxos.yml index 92a648b1..359115d2 100755 --- a/nautobot_device_onboarding/command_mappers/cisco_nxos.yml +++ b/nautobot_device_onboarding/command_mappers/cisco_nxos.yml @@ -71,3 +71,13 @@ network_importer: - command: "show interface" use_textfsm: true jpath: "[*].{interface: interface, mode: mode}" + vrf_interfaces: + commands: + - command: "show vrf interface" + use_textfsm: true + jpath: "[*].{interface: interface, name: name, id: id}" + vrf_rds: + commands: + - command: "show vrf detail" + use_textfsm: true + jpath: "[*].{id: id, name: name, default_rd: default_rd}" diff --git a/nautobot_device_onboarding/utils/formatter.py b/nautobot_device_onboarding/utils/formatter.py index 6539b064..4fa0e8e9 100755 --- a/nautobot_device_onboarding/utils/formatter.py +++ b/nautobot_device_onboarding/utils/formatter.py @@ -75,10 +75,10 @@ def perform_data_extraction(host, dict_field, command_info_dict, 
j2_env, task_re print(f"extracted 1: {extracted_processed}") else: extracted_processed = extracted_value - # print(f"extracted 2: {extracted_processed}") - # if isinstance(extracted_value, list) and len(extracted_value) == 1: - # extracted_processed = extracted_value[0] - # print(f"extracted 3: {extracted_processed}") + print(f"extracted 2: {extracted_processed}") + if isinstance(extracted_value, list) and len(extracted_value) == 1: + extracted_processed = extracted_value[0] + print(f"extracted 3: {extracted_processed}") if command_info_dict.get("validator_pattern"): # temp validator if command_info_dict["validator_pattern"] == "not None": @@ -126,18 +126,36 @@ def map_interface_type(interface_type): return INTERFACE_TYPE_MAP_STATIC.get(interface_type, "other") +def ensure_list(data): + """Ensure data is a list.""" + if not isinstance(data, list): + return [data] + return data + + def format_ios_results(device): """Format the results of the show commands for IOS devices.""" + try: serial = device.get("serial") - mtu_list = device.get("mtu", []) - type_list = device.get("type", []) - ip_list = device.get("ip_addresses", []) - prefix_list = device.get("prefix_length", []) - mac_list = device.get("mac_address", []) - description_list = device.get("description", []) - link_status_list = device.get("link_status", []) - vrf_list = device.get("vrfs", []) + mtus = device.get("mtu", []) + types = device.get("type", []) + ips = device.get("ip_addresses", []) + prefixes = device.get("prefix_length", []) + macs = device.get("mac_address", []) + descriptions = device.get("description", []) + link_statuses = device.get("link_status", []) + vrfs = device.get("vrfs", []) + + # Some data may come across as a dict, needs to be list. Probably should do this elsewhere. 
+ mtu_list = ensure_list(mtus) + type_list = ensure_list(types) + ip_list = ensure_list(ips) + prefix_list = ensure_list(prefixes) + mac_list = ensure_list(macs) + description_list = ensure_list(descriptions) + link_status_list = ensure_list(link_statuses) + vrf_list = ensure_list(vrfs) interface_dict = {} for item in mtu_list: @@ -193,24 +211,50 @@ def format_ios_results(device): del device["vrfs"] except KeyError: - device = {"failed": True, "failed_reason": f"Formatting error for device {device}"} + device = {"failed": True, "failed_reason": f"Formatting error 2 for device {device}"} + except Exception as e: + device = {"failed": True, "failed_reason": f"Formatting error 1 {e} for device {device}"} + print(f"susan {device}") + return device + + +def format_nxos_vrf_results(device): + """Format the show commands to get interface and rd""" + try: + vrf_interface_list = device.get("vrf_interfaces", []) + vrf_rd_list = device.get("vrf_rds", []) + + dict2 = {item["id"]: item for item in list2} + + for id in vrf_interface_list: + id.update(vrf_rd_list.get(id["id"], {})) + print(f"vrf_interface_list {vrf_interface_list}") except Exception: device = {"failed": True, "failed_reason": f"Formatting error for device {device}"} - return device + return vrf_interface_list def format_nxos_results(device): """Format the results of the show commands for NX-OS devices.""" try: serial = device.get("serial") - mtu_list = device.get("mtu", []) - type_list = device.get("type", []) - ip_list = device.get("ip_addresses", []) - prefix_list = device.get("prefix_length", []) - mac_list = device.get("mac_address", []) - description_list = device.get("description", []) - link_status_list = device.get("link_status", []) - mode_list = device.get("mode", []) + mtus = device.get("mtu", []) + types = device.get("type", []) + ips = device.get("ip_addresses", []) + prefixes = device.get("prefix_length", []) + macs = device.get("mac_address", []) + descriptions = device.get("description", []) + 
link_statuses = device.get("link_status", []) + modes = device.get("mode", []) + + mtu_list = ensure_list(mtus) + type_list = ensure_list(types) + ip_list = ensure_list(ips) + prefix_list = ensure_list(prefixes) + mac_list = ensure_list(macs) + description_list = ensure_list(descriptions) + link_status_list = ensure_list(link_statuses) + mode_list = ensure_list(modes) interface_dict = {} for item in mtu_list: From 4b71227948a0775b8f55f9f20a064bf263933a3e Mon Sep 17 00:00:00 2001 From: Jeff Kala Date: Thu, 4 Apr 2024 15:50:53 -0500 Subject: [PATCH 186/225] more refactors and cleanups --- nautobot_device_onboarding/constants.py | 1 + nautobot_device_onboarding/datasources.py | 11 ++--- .../nornir_plays/command_getter.py | 6 +-- .../nornir_plays/empty_inventory.py | 2 +- .../{utils => nornir_plays}/formatter.py | 3 +- .../nornir_plays/inventory_creator.py | 2 +- .../{utils => nornir_plays}/jinja_filters.py | 0 .../nornir_plays/processor.py | 2 +- .../nornir_plays/transform.py | 42 +++++++++++++++++++ nautobot_device_onboarding/utils/helper.py | 33 +-------------- 10 files changed, 58 insertions(+), 44 deletions(-) rename nautobot_device_onboarding/{utils => nornir_plays}/formatter.py (99%) rename nautobot_device_onboarding/{utils => nornir_plays}/jinja_filters.py (100%) create mode 100755 nautobot_device_onboarding/nornir_plays/transform.py diff --git a/nautobot_device_onboarding/constants.py b/nautobot_device_onboarding/constants.py index 16853853..b955b781 100644 --- a/nautobot_device_onboarding/constants.py +++ b/nautobot_device_onboarding/constants.py @@ -1,4 +1,5 @@ """Constants for nautobot_device_onboarding app.""" + import os from django.conf import settings diff --git a/nautobot_device_onboarding/datasources.py b/nautobot_device_onboarding/datasources.py index 5acda3fd..313fb62d 100755 --- a/nautobot_device_onboarding/datasources.py +++ b/nautobot_device_onboarding/datasources.py @@ -1,4 +1,5 @@ """Datasources to override command_mapper yaml files.""" + from 
nautobot.extras.choices import LogLevelChoices from nautobot.apps.datasources import DatasourceContent @@ -13,12 +14,12 @@ def refresh_git_command_mappers(repository_record, job_result, delete=False): # datasource_contents = [ ( - 'extras.gitrepository', + "extras.gitrepository", DatasourceContent( - name='Onboarding Command Mappers', - content_identifier='nautobot_device_onboarding.onboarding_command_mappers', - icon='mdi-paw', + name="Onboarding Command Mappers", + content_identifier="nautobot_device_onboarding.onboarding_command_mappers", + icon="mdi-paw", callback=refresh_git_command_mappers, - ) + ), ) ] diff --git a/nautobot_device_onboarding/nornir_plays/command_getter.py b/nautobot_device_onboarding/nornir_plays/command_getter.py index 4d8dffb4..ab309a1a 100755 --- a/nautobot_device_onboarding/nornir_plays/command_getter.py +++ b/nautobot_device_onboarding/nornir_plays/command_getter.py @@ -18,8 +18,8 @@ from nautobot_device_onboarding.nornir_plays.logger import NornirLogger from nautobot_device_onboarding.nornir_plays.processor import ProcessorDO from nautobot_device_onboarding.nornir_plays.inventory_creator import _set_inventory -from nautobot_device_onboarding.utils.formatter import format_results -from nautobot_device_onboarding.utils.helper import add_platform_parsing_info +from nautobot_device_onboarding.nornir_plays.formatter import format_results +from nautobot_device_onboarding.nornir_plays.transform import add_platform_parsing_info InventoryPluginRegister.register("nautobot-inventory", NautobotORMInventory) InventoryPluginRegister.register("empty-inventory", EmptyInventory) @@ -178,7 +178,7 @@ def command_getter_do(job_result, log_level, kwargs): else: single_host_inventory_constructed = _set_inventory(entered_ip, platform, port, username, password) nr_with_processors.inventory.hosts.update(single_host_inventory_constructed) - logger.info(nr_with_processors.inventory.hosts) + logger.info(nr_with_processors.inventory.defaults.data) 
nr_with_processors.run(task=netmiko_send_commands, command_getter_job="device_onboarding") except Exception as err: # pylint: disable=broad-exception-caught logger.error(err) diff --git a/nautobot_device_onboarding/nornir_plays/empty_inventory.py b/nautobot_device_onboarding/nornir_plays/empty_inventory.py index a3ed3efd..d9531d72 100755 --- a/nautobot_device_onboarding/nornir_plays/empty_inventory.py +++ b/nautobot_device_onboarding/nornir_plays/empty_inventory.py @@ -6,7 +6,7 @@ class EmptyInventory: """Creates an empty Nornir inventory.""" - def load(self): + def load(self) -> Inventory: """Create a default empty inventory.""" hosts = Hosts() defaults = Defaults(data={}) diff --git a/nautobot_device_onboarding/utils/formatter.py b/nautobot_device_onboarding/nornir_plays/formatter.py similarity index 99% rename from nautobot_device_onboarding/utils/formatter.py rename to nautobot_device_onboarding/nornir_plays/formatter.py index 60121565..0a610a6d 100755 --- a/nautobot_device_onboarding/utils/formatter.py +++ b/nautobot_device_onboarding/nornir_plays/formatter.py @@ -8,7 +8,7 @@ from netutils.interface import canonical_interface_name from nautobot_device_onboarding.constants import INTERFACE_TYPE_MAP_STATIC, PLUGIN_CFG -from nautobot_device_onboarding.utils.jinja_filters import fix_interfaces +from nautobot_device_onboarding.nornir_plays.jinja_filters import fix_interfaces def get_django_env(): @@ -43,7 +43,6 @@ def get_django_env(): return jinja_env - def perform_data_extraction(host, dict_field, command_info_dict, j2_env, task_result): """Extract, process data.""" result_dict = {} diff --git a/nautobot_device_onboarding/nornir_plays/inventory_creator.py b/nautobot_device_onboarding/nornir_plays/inventory_creator.py index 4c4e0b14..36c4e2c8 100755 --- a/nautobot_device_onboarding/nornir_plays/inventory_creator.py +++ b/nautobot_device_onboarding/nornir_plays/inventory_creator.py @@ -3,7 +3,7 @@ from netmiko import SSHDetect from nornir.core.inventory import 
ConnectionOptions, Host -from nautobot_device_onboarding.utils.helper import add_platform_parsing_info +from nautobot_device_onboarding.nornir_plays.transform import add_platform_parsing_info def guess_netmiko_device_type(hostname, username, password, port): diff --git a/nautobot_device_onboarding/utils/jinja_filters.py b/nautobot_device_onboarding/nornir_plays/jinja_filters.py similarity index 100% rename from nautobot_device_onboarding/utils/jinja_filters.py rename to nautobot_device_onboarding/nornir_plays/jinja_filters.py diff --git a/nautobot_device_onboarding/nornir_plays/processor.py b/nautobot_device_onboarding/nornir_plays/processor.py index 63792d59..e8a4cbc6 100755 --- a/nautobot_device_onboarding/nornir_plays/processor.py +++ b/nautobot_device_onboarding/nornir_plays/processor.py @@ -6,7 +6,7 @@ from nornir.core.task import MultiResult, Task from nornir_nautobot.plugins.processors import BaseLoggingProcessor -from nautobot_device_onboarding.utils.formatter import extract_show_data +from nautobot_device_onboarding.nornir_plays.formatter import extract_show_data class ProcessorDO(BaseLoggingProcessor): diff --git a/nautobot_device_onboarding/nornir_plays/transform.py b/nautobot_device_onboarding/nornir_plays/transform.py new file mode 100755 index 00000000..7b59d1f9 --- /dev/null +++ b/nautobot_device_onboarding/nornir_plays/transform.py @@ -0,0 +1,42 @@ +"""Nornir tranform function to add command mapper, platform parsing info.""" + +import os +import yaml +from nautobot.extras.models import GitRepository + +DATA_DIR = os.path.abspath(os.path.join(os.path.dirname(os.path.dirname(__file__)), "command_mappers")) + + +def add_platform_parsing_info(host): + """This nornir transform function adds platform parsing info.""" + if ( + GitRepository.objects.filter( + provided_contents=["nautobot_device_onboarding.onboarding_command_mappers"] + ).count() + == 1 + ): + repository_record = GitRepository.objects.filter( + 
provided_contents=["nautobot_device_onboarding.onboarding_command_mappers"] + ).first() + repo_data_dir = os.path.join(repository_record.filesystem_path, "onboarding_command_mappers") + command_mappers_repo_path = load_command_mappers_from_dir(repo_data_dir) + else: + command_mappers_repo_path = {} + command_mapper_defaults = load_command_mappers_from_dir(DATA_DIR) + # parsing_info = _get_default_platform_parsing_info(host.platform) + merged_command_mappers = {**command_mapper_defaults, **command_mappers_repo_path} + # This is so we can reuse this for a non-nornir host object since we don't have it in an empty inventory at this point. + if not isinstance(host, str): + host.data.update({"platform_parsing_info": merged_command_mappers}) + return merged_command_mappers + + +def load_command_mappers_from_dir(command_mappers_path): + """Helper to load all yaml files in directory and return merged dictionary.""" + command_mappers_result = {} + for filename in os.listdir(command_mappers_path): + with open(os.path.join(command_mappers_path, filename), encoding="utf-8") as fd: + network_driver = filename.split(".")[0] + command_mappers_data = yaml.safe_load(fd) + command_mappers_result[network_driver] = command_mappers_data + return command_mappers_result diff --git a/nautobot_device_onboarding/utils/helper.py b/nautobot_device_onboarding/utils/helper.py index 557a370c..b8aae847 100644 --- a/nautobot_device_onboarding/utils/helper.py +++ b/nautobot_device_onboarding/utils/helper.py @@ -1,17 +1,13 @@ -"""helper.py.""" -import os +"""General helper functions for the app.""" + import socket import netaddr -import yaml from netaddr.core import AddrFormatError from nautobot.dcim.filters import DeviceFilterSet from nautobot.dcim.models import Device -from nautobot.extras.models import GitRepository from nornir_nautobot.exceptions import NornirNautobotException - from nautobot_device_onboarding.exceptions import OnboardException -DATA_DIR = 
os.path.abspath(os.path.join(os.path.dirname(os.path.dirname(__file__)), "command_mappers")) FIELDS_PK = { "location", @@ -88,28 +84,3 @@ def onboarding_task_fqdn_to_ip(address): except socket.gaierror as err: # DNS Lookup has failed, Raise an exception for unable to complete DNS lookup raise OnboardException(f"fail-dns - ERROR failed to complete DNS lookup: {address}") from err - - -def add_platform_parsing_info(host): - """This nornir transform function adds platform parsing info.""" - repository_record = GitRepository.objects.filter(provided_contents=['nautobot_device_onboarding.onboarding_command_mappers']).first() - repo_data_dir = os.path.join(repository_record.filesystem_path, 'onboarding_command_mappers') - command_mapper_defaults = load_command_mappers_from_dir(DATA_DIR) - command_mappers_repo_path = load_command_mappers_from_dir(repo_data_dir) - # parsing_info = _get_default_platform_parsing_info(host.platform) - merged_command_mappers = {**command_mapper_defaults, **command_mappers_repo_path} - # This is so we can reuse this for a non-nornir host object since we don't have it in an empty inventory at this point. 
- if not isinstance(host, str): - host.data.update({"platform_parsing_info": merged_command_mappers}) - return merged_command_mappers - - -def load_command_mappers_from_dir(command_mappers_path): - """Helper to load all yaml files in directory and return merged dictionary.""" - command_mappers_result = {} - for filename in os.listdir(command_mappers_path): - with open(os.path.join(command_mappers_path, filename), encoding="utf-8") as fd: - network_driver = filename.split('.')[0] - command_mappers_data = yaml.safe_load(fd) - command_mappers_result[network_driver] = command_mappers_data - return command_mappers_result From 658ab2e0eb26aa1d301c074fa8fca46ddf923cb9 Mon Sep 17 00:00:00 2001 From: Jeff Kala Date: Thu, 4 Apr 2024 15:58:12 -0500 Subject: [PATCH 187/225] fix custom filters development config --- development/nautobot_config.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/development/nautobot_config.py b/development/nautobot_config.py index 677f04cf..88c5660a 100644 --- a/development/nautobot_config.py +++ b/development/nautobot_config.py @@ -137,10 +137,9 @@ # Each key in the dictionary is the name of an installed App and its value is a dictionary of settings. 
PLUGINS_CONFIG = { "nautobot_device_onboarding": { - "custom_post_processing_filters": { - "example_filter_one": "parts.jeff.jeff_interfaces", - # "example_filter_two": "my_package.my_filters.filtertwo" - } + # "custom_post_processing_filters": { + # "example_filter_one": "my_package.my_filters.filterone" + # } }, "nautobot_ssot": { "hide_example_jobs": is_truthy(os.getenv("NAUTOBOT_SSOT_HIDE_EXAMPLE_JOBS")), From 2f9424fa9a54e24e49bf6c40a2ff935766e880b9 Mon Sep 17 00:00:00 2001 From: Jeff Kala Date: Thu, 4 Apr 2024 17:04:22 -0500 Subject: [PATCH 188/225] remove jinja2 fitler registration in favor of native support with decorator --- development/nautobot_config.py | 6 +--- .../nornir_plays/command_getter.py | 1 - .../nornir_plays/formatter.py | 36 +++++++++++-------- .../nornir_plays/jinja_filters.py | 6 ++-- 4 files changed, 26 insertions(+), 23 deletions(-) diff --git a/development/nautobot_config.py b/development/nautobot_config.py index 88c5660a..9456005a 100644 --- a/development/nautobot_config.py +++ b/development/nautobot_config.py @@ -136,11 +136,7 @@ # Apps configuration settings. These settings are used by various Apps that the user may have installed. # Each key in the dictionary is the name of an installed App and its value is a dictionary of settings. 
PLUGINS_CONFIG = { - "nautobot_device_onboarding": { - # "custom_post_processing_filters": { - # "example_filter_one": "my_package.my_filters.filterone" - # } - }, + "nautobot_device_onboarding": {}, "nautobot_ssot": { "hide_example_jobs": is_truthy(os.getenv("NAUTOBOT_SSOT_HIDE_EXAMPLE_JOBS")), }, diff --git a/nautobot_device_onboarding/nornir_plays/command_getter.py b/nautobot_device_onboarding/nornir_plays/command_getter.py index ab309a1a..70669a24 100755 --- a/nautobot_device_onboarding/nornir_plays/command_getter.py +++ b/nautobot_device_onboarding/nornir_plays/command_getter.py @@ -178,7 +178,6 @@ def command_getter_do(job_result, log_level, kwargs): else: single_host_inventory_constructed = _set_inventory(entered_ip, platform, port, username, password) nr_with_processors.inventory.hosts.update(single_host_inventory_constructed) - logger.info(nr_with_processors.inventory.defaults.data) nr_with_processors.run(task=netmiko_send_commands, command_getter_job="device_onboarding") except Exception as err: # pylint: disable=broad-exception-caught logger.error(err) diff --git a/nautobot_device_onboarding/nornir_plays/formatter.py b/nautobot_device_onboarding/nornir_plays/formatter.py index 715c6f10..93cd9d50 100755 --- a/nautobot_device_onboarding/nornir_plays/formatter.py +++ b/nautobot_device_onboarding/nornir_plays/formatter.py @@ -8,8 +8,7 @@ from netutils.interface import canonical_interface_name from nautobot.dcim.models import Device -from nautobot_device_onboarding.constants import INTERFACE_TYPE_MAP_STATIC, PLUGIN_CFG -from nautobot_device_onboarding.nornir_plays.jinja_filters import fix_interfaces +from nautobot_device_onboarding.constants import INTERFACE_TYPE_MAP_STATIC def get_django_env(): @@ -28,19 +27,21 @@ def get_django_env(): j2_env["undefined"] = import_string(j2_env["undefined"]) jinja_env = SandboxedEnvironment(**j2_env) jinja_env.filters = engines["jinja"].env.filters - if PLUGIN_CFG.get("custom_post_processing_filters"): - for filter_name, 
filter_function in PLUGIN_CFG["custom_post_processing_filters"].items(): - try: - func = import_string(filter_function) - except Exception as error: # pylint: disable=broad-except - msg = ( - "There was an issue attempting to import the custom post_processing filters of" - f" {filter_name} this is expected with a local configuration issue " - "and not related to the Device Onboarding App, please contact your system admin for further details" - ) - raise Exception(msg).with_traceback(error.__traceback__) - jinja_env.filters[filter_name] = func - jinja_env.filters["fix_interfaces"] = fix_interfaces + # https://docs.nautobot.com/projects/core/en/stable/development/apps/api/platform-features/jinja2-filters/ + # + # if PLUGIN_CFG.get("custom_post_processing_filters"): + # for filter_name, filter_function in PLUGIN_CFG["custom_post_processing_filters"].items(): + # try: + # func = import_string(filter_function) + # except Exception as error: # pylint: disable=broad-except + # msg = ( + # "There was an issue attempting to import the custom post_processing filters of" + # f" {filter_name} this is expected with a local configuration issue " + # "and not related to the Device Onboarding App, please contact your system admin for further details" + # ) + # raise Exception(msg).with_traceback(error.__traceback__) + # jinja_env.filters[filter_name] = func + # jinja_env.filters["fix_interfaces"] = fix_interfaces return jinja_env @@ -123,6 +124,11 @@ def extract_show_data(host, multi_result, command_getter_type): def map_interface_type(interface_type): """Map interface type to a Nautobot type.""" + # Can maybe this be used? 
+ # from nautobot.dcim.choices import InterfaceTypeChoices + # InterfaceTypeChoices.CHOICES + # In [15]: dict(InterfaceTypeChoices.CHOICES).get('Other') + # Out[15]: (('other', 'Other'),) return INTERFACE_TYPE_MAP_STATIC.get(interface_type, "other") diff --git a/nautobot_device_onboarding/nornir_plays/jinja_filters.py b/nautobot_device_onboarding/nornir_plays/jinja_filters.py index 1f82cd78..bca8f487 100755 --- a/nautobot_device_onboarding/nornir_plays/jinja_filters.py +++ b/nautobot_device_onboarding/nornir_plays/jinja_filters.py @@ -1,13 +1,15 @@ """Filters for Jinja2 PostProcessing.""" - +from django_jinja import library from nautobot_device_onboarding.constants import INTERFACE_TYPE_MAP_STATIC +# https://docs.nautobot.com/projects/core/en/stable/development/apps/api/platform-features/jinja2-filters/ +@library.filter def map_interface_type(interface_type): """Map interface type to a Nautobot type.""" return INTERFACE_TYPE_MAP_STATIC.get(interface_type, "other") - +@library.filter def fix_interfaces(interfaces): """Prep interface formatting for SSoT.""" for interface in interfaces: From 427f75444b3b8b7cc8595e13a01f51d4ec84bb62 Mon Sep 17 00:00:00 2001 From: Jeff Kala Date: Thu, 4 Apr 2024 21:52:22 -0500 Subject: [PATCH 189/225] linters, formatters, stuff --- nautobot_device_onboarding/constants.py | 1 - nautobot_device_onboarding/nornir_plays/formatter.py | 7 +++---- nautobot_device_onboarding/nornir_plays/jinja_filters.py | 3 +++ 3 files changed, 6 insertions(+), 5 deletions(-) diff --git a/nautobot_device_onboarding/constants.py b/nautobot_device_onboarding/constants.py index b955b781..440db241 100644 --- a/nautobot_device_onboarding/constants.py +++ b/nautobot_device_onboarding/constants.py @@ -1,6 +1,5 @@ """Constants for nautobot_device_onboarding app.""" -import os from django.conf import settings diff --git a/nautobot_device_onboarding/nornir_plays/formatter.py b/nautobot_device_onboarding/nornir_plays/formatter.py index 93cd9d50..f1c61eaa 100755 --- 
a/nautobot_device_onboarding/nornir_plays/formatter.py +++ b/nautobot_device_onboarding/nornir_plays/formatter.py @@ -28,7 +28,7 @@ def get_django_env(): jinja_env = SandboxedEnvironment(**j2_env) jinja_env.filters = engines["jinja"].env.filters # https://docs.nautobot.com/projects/core/en/stable/development/apps/api/platform-features/jinja2-filters/ - # + # # if PLUGIN_CFG.get("custom_post_processing_filters"): # for filter_name, filter_function in PLUGIN_CFG["custom_post_processing_filters"].items(): # try: @@ -141,7 +141,6 @@ def ensure_list(data): def format_ios_results(device): """Format the results of the show commands for IOS devices.""" - try: serial = device.get("serial") mtus = device.get("mtu", []) @@ -225,12 +224,12 @@ def format_ios_results(device): def format_nxos_vrf_results(device): - """Format the show commands to get interface and rd""" + """Format the show commands to get interface and rd.""" try: vrf_interface_list = device.get("vrf_interfaces", []) vrf_rd_list = device.get("vrf_rds", []) - dict2 = {item["id"]: item for item in list2} + # dict2 = {item["id"]: item for item in list2} # jeff commented out since it wasn't used at all. 
for id in vrf_interface_list: id.update(vrf_rd_list.get(id["id"], {})) diff --git a/nautobot_device_onboarding/nornir_plays/jinja_filters.py b/nautobot_device_onboarding/nornir_plays/jinja_filters.py index bca8f487..52377259 100755 --- a/nautobot_device_onboarding/nornir_plays/jinja_filters.py +++ b/nautobot_device_onboarding/nornir_plays/jinja_filters.py @@ -1,14 +1,17 @@ """Filters for Jinja2 PostProcessing.""" + from django_jinja import library from nautobot_device_onboarding.constants import INTERFACE_TYPE_MAP_STATIC # https://docs.nautobot.com/projects/core/en/stable/development/apps/api/platform-features/jinja2-filters/ + @library.filter def map_interface_type(interface_type): """Map interface type to a Nautobot type.""" return INTERFACE_TYPE_MAP_STATIC.get(interface_type, "other") + @library.filter def fix_interfaces(interfaces): """Prep interface formatting for SSoT.""" From 812406ee4938c745696ae77877ff8b410b77e542 Mon Sep 17 00:00:00 2001 From: Jeff Kala Date: Fri, 5 Apr 2024 10:38:31 -0500 Subject: [PATCH 190/225] fix platform parsing info --- nautobot_device_onboarding/nornir_plays/transform.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nautobot_device_onboarding/nornir_plays/transform.py b/nautobot_device_onboarding/nornir_plays/transform.py index 7b59d1f9..9a7cfb07 100755 --- a/nautobot_device_onboarding/nornir_plays/transform.py +++ b/nautobot_device_onboarding/nornir_plays/transform.py @@ -27,7 +27,7 @@ def add_platform_parsing_info(host): merged_command_mappers = {**command_mapper_defaults, **command_mappers_repo_path} # This is so we can reuse this for a non-nornir host object since we don't have it in an empty inventory at this point. 
if not isinstance(host, str): - host.data.update({"platform_parsing_info": merged_command_mappers}) + host.data.update({"platform_parsing_info": merged_command_mappers[host.platform]}) return merged_command_mappers From 90de339a32bc7e25184b3170ece33312b4ef55f4 Mon Sep 17 00:00:00 2001 From: Jeff Kala Date: Fri, 5 Apr 2024 12:18:48 -0500 Subject: [PATCH 191/225] fix return indent level --- nautobot_device_onboarding/nornir_plays/transform.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nautobot_device_onboarding/nornir_plays/transform.py b/nautobot_device_onboarding/nornir_plays/transform.py index 9a7cfb07..c749bd6e 100755 --- a/nautobot_device_onboarding/nornir_plays/transform.py +++ b/nautobot_device_onboarding/nornir_plays/transform.py @@ -39,4 +39,4 @@ def load_command_mappers_from_dir(command_mappers_path): network_driver = filename.split(".")[0] command_mappers_data = yaml.safe_load(fd) command_mappers_result[network_driver] = command_mappers_data - return command_mappers_result + return command_mappers_result From d8b04d305b97c7e1a4acaf27e74dc2804f11c155 Mon Sep 17 00:00:00 2001 From: Jeff Kala Date: Fri, 5 Apr 2024 12:22:08 -0500 Subject: [PATCH 192/225] fix return indent level --- nautobot_device_onboarding/nornir_plays/transform.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/nautobot_device_onboarding/nornir_plays/transform.py b/nautobot_device_onboarding/nornir_plays/transform.py index c749bd6e..f98ca1eb 100755 --- a/nautobot_device_onboarding/nornir_plays/transform.py +++ b/nautobot_device_onboarding/nornir_plays/transform.py @@ -27,7 +27,10 @@ def add_platform_parsing_info(host): merged_command_mappers = {**command_mapper_defaults, **command_mappers_repo_path} # This is so we can reuse this for a non-nornir host object since we don't have it in an empty inventory at this point. 
if not isinstance(host, str): - host.data.update({"platform_parsing_info": merged_command_mappers[host.platform]}) + if host.platform == 'cisco_xe': + host.data.update({"platform_parsing_info": merged_command_mappers['cisco_ios']}) + else: + host.data.update({"platform_parsing_info": merged_command_mappers[host.platform]}) return merged_command_mappers From 6791f8db856c677f5ba9966766ea17da50011920 Mon Sep 17 00:00:00 2001 From: Susan Hooks Date: Fri, 5 Apr 2024 17:37:39 +0000 Subject: [PATCH 193/225] add default dict --- .../nornir_plays/command_getter.py | 1 - nautobot_device_onboarding/nornir_plays/formatter.py | 10 +++++++--- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/nautobot_device_onboarding/nornir_plays/command_getter.py b/nautobot_device_onboarding/nornir_plays/command_getter.py index 70669a24..24938095 100755 --- a/nautobot_device_onboarding/nornir_plays/command_getter.py +++ b/nautobot_device_onboarding/nornir_plays/command_getter.py @@ -210,7 +210,6 @@ def command_getter_ni(job_result, log_level, kwargs): except Exception as err: # pylint: disable=broad-exception-caught logger.info("Error: %s", err) return err - compiled_results = format_results(compiled_results) return compiled_results diff --git a/nautobot_device_onboarding/nornir_plays/formatter.py b/nautobot_device_onboarding/nornir_plays/formatter.py index f1c61eaa..899df835 100755 --- a/nautobot_device_onboarding/nornir_plays/formatter.py +++ b/nautobot_device_onboarding/nornir_plays/formatter.py @@ -182,6 +182,9 @@ def format_ios_results(device): interface_dict.setdefault(item["interface"], {})["link_status"] = ( True if item["link_status"] == "up" else False ) + for interface in interface_dict.values(): + interface.setdefault("vrf", {}) + for vrf in vrf_list: for interface in vrf["interfaces"]: canonical_name = canonical_interface_name(interface) @@ -219,7 +222,7 @@ def format_ios_results(device): device = {"failed": True, "failed_reason": f"Formatting error 2 for device 
{device}"} except Exception as e: device = {"failed": True, "failed_reason": f"Formatting error 1 {e} for device {device}"} - print(f"susan {device}") + return device @@ -251,7 +254,9 @@ def format_nxos_results(device): descriptions = device.get("description", []) link_statuses = device.get("link_status", []) modes = device.get("mode", []) - + vrfs_rd = device.get("vrf_rds", []) + vrfs_interfaces = device.get("vrf_interfaces", []) + print(f"vrfs_rd {vrfs_rd}, vrf_interfaces {vrfs_interfaces}") mtu_list = ensure_list(mtus) type_list = ensure_list(types) ip_list = ensure_list(ips) @@ -287,7 +292,6 @@ def format_nxos_results(device): ) for interface in interface_dict.values(): - # interface.setdefault("802.1Q_mode", "") interface.setdefault("lag", "") interface.setdefault("untagged_vlan", {}) interface.setdefault("tagged_vlans", []) From fe38059992fdc659724872533887a508020fe511 Mon Sep 17 00:00:00 2001 From: Jeff Kala Date: Fri, 5 Apr 2024 14:01:31 -0500 Subject: [PATCH 194/225] more cleanups remove transform function --- .../command_mappers/cisco_xe.yml | 82 +++ .../nornir_plays/command_getter.py | 30 +- .../nornir_plays/empty_inventory.py | 3 +- .../nornir_plays/formatter.py | 15 - .../nornir_plays/jinja_filters.py | 4 +- .../nornir_plays/transform.py | 13 +- poetry.lock | 684 ++++++++++++------ tasks.py | 2 +- 8 files changed, 583 insertions(+), 250 deletions(-) create mode 100755 nautobot_device_onboarding/command_mappers/cisco_xe.yml diff --git a/nautobot_device_onboarding/command_mappers/cisco_xe.yml b/nautobot_device_onboarding/command_mappers/cisco_xe.yml new file mode 100755 index 00000000..4a550896 --- /dev/null +++ b/nautobot_device_onboarding/command_mappers/cisco_xe.yml @@ -0,0 +1,82 @@ +--- +device_onboarding: + hostname: + commands: + - command: "show version" + use_textfsm: true + post_processor: "{{ obj[0] | upper }}" + jpath: "[*].hostname" + serial: + commands: + - command: "show version" + use_textfsm: true + jpath: "[*].serial[0]" + device_type: + 
commands: + - command: "show version" + use_textfsm: true + jpath: "[*].hardware[0]" + mgmt_interface: + commands: + - command: "show interfaces" + use_textfsm: true + jpath: "[?ip_address=='{{ obj }}'].{name: interface, enabled: link_status}" + post_processor: "{{ (obj | selectattr('enabled', 'eq', 'up') | list | first ).name }}" + mask_length: + commands: + - command: "show interfaces" + use_textfsm: true + jpath: "[?ip_address=='{{ obj }}'].prefix_length" + post_processor: "{{ obj | unique | first }}" +network_importer: + serial: + commands: + - command: "show version" + use_textfsm: true + jpath: "[*].serial[0]" + # interfaces: + # interfaces: + # commands: + # - command: "show interfaces" + # use_textfsm: true + # jpath: "[*].{interface: interface}" + type: + commands: + - command: "show interfaces" + use_textfsm: true + jpath: "[*].{interface: interface, type: hardware_type}" + ip_addresses: + commands: + - command: "show interfaces" + use_textfsm: true + jpath: "[*].{interface: interface, ip_address: ip_address}" + prefix_length: + commands: + - command: "show interfaces" + use_textfsm: true + jpath: "[*].{interface: interface, prefix_length: prefix_length}" + mtu: + commands: + - command: "show interfaces" + use_textfsm: true + jpath: "[*].{interface: interface, mtu: mtu}" + mac_address: + commands: + - command: "show interfaces" + use_textfsm: true + jpath: "[*].{interface: interface, mac_address: mac_address}" + description: + commands: + - command: "show interfaces" + use_textfsm: true + jpath: "[*].{interface: interface, description: description}" + link_status: + commands: + - command: "show interfaces" + use_textfsm: true + jpath: "[*].{interface: interface, link_status: link_status}" + vrfs: + commands: + - command: "show vrf" + use_textfsm: true + jpath: "[*].{name: name, default_rd: default_rd, interfaces: interfaces}" diff --git a/nautobot_device_onboarding/nornir_plays/command_getter.py b/nautobot_device_onboarding/nornir_plays/command_getter.py 
index 70669a24..bb8d455f 100755 --- a/nautobot_device_onboarding/nornir_plays/command_getter.py +++ b/nautobot_device_onboarding/nornir_plays/command_getter.py @@ -1,6 +1,7 @@ """CommandGetter.""" # pylint: disable=relative-beyond-top-level +from typing import Dict from django.conf import settings from nautobot.dcim.models import Platform from nautobot.extras.choices import SecretsGroupAccessTypeChoices, SecretsGroupSecretTypeChoices @@ -9,7 +10,7 @@ from nautobot_plugin_nornir.plugins.inventory.nautobot_orm import NautobotORMInventory from nornir import InitNornir from nornir.core.exceptions import NornirSubTaskError -from nornir.core.plugins.inventory import InventoryPluginRegister, TransformFunctionRegister +from nornir.core.plugins.inventory import InventoryPluginRegister from nornir.core.task import Result, Task from nornir_netmiko.tasks import netmiko_send_command @@ -23,7 +24,6 @@ InventoryPluginRegister.register("nautobot-inventory", NautobotORMInventory) InventoryPluginRegister.register("empty-inventory", EmptyInventory) -TransformFunctionRegister.register("transform_to_add_command_parser_info", add_platform_parsing_info) def deduplicate_command_list(data): @@ -46,10 +46,10 @@ def deduplicate_command_list(data): return unique_list -def _get_commands_to_run(yaml_parsed_info, command_getter_job): +def _get_commands_to_run(yaml_parsed_info): """Load yaml file and look up all commands that need to be run.""" all_commands = [] - for _, value in yaml_parsed_info[command_getter_job].items(): + for _, value in yaml_parsed_info.items(): # Deduplicate commands + parser key if value.get("commands"): # Means their isn't any "nested" structures. 
@@ -64,13 +64,14 @@ def _get_commands_to_run(yaml_parsed_info, command_getter_job): return deduplicate_command_list(all_commands) -def netmiko_send_commands(task: Task, command_getter_job: str): +def netmiko_send_commands(task: Task, command_getter_yaml_data: Dict, command_getter_job: str): """Run commands specified in PLATFORM_COMMAND_MAP.""" if not task.host.platform: return Result(host=task.host, result=f"{task.host.name} has no platform set.", failed=True) if task.host.platform not in list(NETMIKO_TO_NAPALM_STATIC.keys()): return Result(host=task.host, result=f"{task.host.name} has a unsupported platform set.", failed=True) - commands = _get_commands_to_run(task.host.data["platform_parsing_info"], command_getter_job) + task.host.data["platform_parsing_info"] = command_getter_yaml_data[task.host.platform] + commands = _get_commands_to_run(command_getter_yaml_data[task.host.platform][command_getter_job]) for command in commands: try: task.run( @@ -141,8 +142,6 @@ def command_getter_do(job_result, log_level, kwargs): logging={"enabled": False}, inventory={ "plugin": "empty-inventory", - # Can't use this since we're dynamically generating inventory on demand. 
- # "transform_function": "transform_to_add_command_parser_info", }, ) as nornir_obj: nr_with_processors = nornir_obj.with_processors([ProcessorDO(logger, compiled_results, kwargs)]) @@ -178,7 +177,11 @@ def command_getter_do(job_result, log_level, kwargs): else: single_host_inventory_constructed = _set_inventory(entered_ip, platform, port, username, password) nr_with_processors.inventory.hosts.update(single_host_inventory_constructed) - nr_with_processors.run(task=netmiko_send_commands, command_getter_job="device_onboarding") + nr_with_processors.run( + task=netmiko_send_commands, + command_getter_yaml_data=nr_with_processors.inventory.defaults.data["platform_parsing_info"], + command_getter_job="device_onboarding", + ) except Exception as err: # pylint: disable=broad-exception-caught logger.error(err) return compiled_results @@ -189,7 +192,6 @@ def command_getter_ni(job_result, log_level, kwargs): logger = NornirLogger(job_result, log_level) try: compiled_results = {} - # qs = get_job_filter(kwargs) qs = kwargs["devices"] if not qs: return None @@ -201,12 +203,16 @@ def command_getter_ni(job_result, log_level, kwargs): "options": { "credentials_class": NORNIR_SETTINGS.get("credentials"), "queryset": qs, + "defaults": {"platform_parsing_info": add_platform_parsing_info()}, }, - "transform_function": "transform_to_add_command_parser_info", }, ) as nornir_obj: nr_with_processors = nornir_obj.with_processors([ProcessorDO(logger, compiled_results, kwargs)]) - nr_with_processors.run(task=netmiko_send_commands, command_getter_job="network_importer") + nr_with_processors.run( + task=netmiko_send_commands, + command_getter_yaml_data=nr_with_processors.inventory.defaults.data["platform_parsing_info"], + command_getter_job="network_importer", + ) except Exception as err: # pylint: disable=broad-exception-caught logger.info("Error: %s", err) return err diff --git a/nautobot_device_onboarding/nornir_plays/empty_inventory.py 
b/nautobot_device_onboarding/nornir_plays/empty_inventory.py index d9531d72..dd01e7dd 100755 --- a/nautobot_device_onboarding/nornir_plays/empty_inventory.py +++ b/nautobot_device_onboarding/nornir_plays/empty_inventory.py @@ -1,6 +1,7 @@ """Empty Nornir Inventory Plugin.""" from nornir.core.inventory import Defaults, Groups, Hosts, Inventory +from nautobot_device_onboarding.nornir_plays.transform import add_platform_parsing_info class EmptyInventory: @@ -9,6 +10,6 @@ class EmptyInventory: def load(self) -> Inventory: """Create a default empty inventory.""" hosts = Hosts() - defaults = Defaults(data={}) + defaults = Defaults(data={"platform_parsing_info": add_platform_parsing_info()}) groups = Groups() return Inventory(hosts=hosts, groups=groups, defaults=defaults) diff --git a/nautobot_device_onboarding/nornir_plays/formatter.py b/nautobot_device_onboarding/nornir_plays/formatter.py index f1c61eaa..61f02583 100755 --- a/nautobot_device_onboarding/nornir_plays/formatter.py +++ b/nautobot_device_onboarding/nornir_plays/formatter.py @@ -27,21 +27,6 @@ def get_django_env(): j2_env["undefined"] = import_string(j2_env["undefined"]) jinja_env = SandboxedEnvironment(**j2_env) jinja_env.filters = engines["jinja"].env.filters - # https://docs.nautobot.com/projects/core/en/stable/development/apps/api/platform-features/jinja2-filters/ - # - # if PLUGIN_CFG.get("custom_post_processing_filters"): - # for filter_name, filter_function in PLUGIN_CFG["custom_post_processing_filters"].items(): - # try: - # func = import_string(filter_function) - # except Exception as error: # pylint: disable=broad-except - # msg = ( - # "There was an issue attempting to import the custom post_processing filters of" - # f" {filter_name} this is expected with a local configuration issue " - # "and not related to the Device Onboarding App, please contact your system admin for further details" - # ) - # raise Exception(msg).with_traceback(error.__traceback__) - # jinja_env.filters[filter_name] = func - 
# jinja_env.filters["fix_interfaces"] = fix_interfaces return jinja_env diff --git a/nautobot_device_onboarding/nornir_plays/jinja_filters.py b/nautobot_device_onboarding/nornir_plays/jinja_filters.py index 52377259..1b32a2dd 100755 --- a/nautobot_device_onboarding/nornir_plays/jinja_filters.py +++ b/nautobot_device_onboarding/nornir_plays/jinja_filters.py @@ -1,10 +1,10 @@ """Filters for Jinja2 PostProcessing.""" +# https://docs.nautobot.com/projects/core/en/stable/development/apps/api/platform-features/jinja2-filters/ + from django_jinja import library from nautobot_device_onboarding.constants import INTERFACE_TYPE_MAP_STATIC -# https://docs.nautobot.com/projects/core/en/stable/development/apps/api/platform-features/jinja2-filters/ - @library.filter def map_interface_type(interface_type): diff --git a/nautobot_device_onboarding/nornir_plays/transform.py b/nautobot_device_onboarding/nornir_plays/transform.py index f98ca1eb..3bbfef8d 100755 --- a/nautobot_device_onboarding/nornir_plays/transform.py +++ b/nautobot_device_onboarding/nornir_plays/transform.py @@ -1,4 +1,4 @@ -"""Nornir tranform function to add command mapper, platform parsing info.""" +"""Adds command mapper, platform parsing info.""" import os import yaml @@ -7,8 +7,8 @@ DATA_DIR = os.path.abspath(os.path.join(os.path.dirname(os.path.dirname(__file__)), "command_mappers")) -def add_platform_parsing_info(host): - """This nornir transform function adds platform parsing info.""" +def add_platform_parsing_info(): + """Merges platform command mapper from repo or defaults.""" if ( GitRepository.objects.filter( provided_contents=["nautobot_device_onboarding.onboarding_command_mappers"] @@ -23,14 +23,7 @@ def add_platform_parsing_info(host): else: command_mappers_repo_path = {} command_mapper_defaults = load_command_mappers_from_dir(DATA_DIR) - # parsing_info = _get_default_platform_parsing_info(host.platform) merged_command_mappers = {**command_mapper_defaults, **command_mappers_repo_path} - # This is so we 
can reuse this for a non-nornir host object since we don't have it in an empty inventory at this point. - if not isinstance(host, str): - if host.platform == 'cisco_xe': - host.data.update({"platform_parsing_info": merged_command_mappers['cisco_ios']}) - else: - host.data.update({"platform_parsing_info": merged_command_mappers[host.platform]}) return merged_command_mappers diff --git a/poetry.lock b/poetry.lock index 069befbf..4b957c38 100755 --- a/poetry.lock +++ b/poetry.lock @@ -1,9 +1,10 @@ -# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.4.0 and should not be changed by hand. [[package]] name = "amqp" version = "5.2.0" description = "Low-level AMQP client for Python (fork of amqplib)." +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -18,6 +19,7 @@ vine = ">=5.0.0,<6.0.0" name = "aniso8601" version = "7.0.0" description = "A library for parsing ISO 8601 strings." +category = "main" optional = false python-versions = "*" files = [ @@ -29,6 +31,7 @@ files = [ name = "anyio" version = "4.3.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -51,6 +54,7 @@ trio = ["trio (>=0.23)"] name = "appnope" version = "0.1.4" description = "Disable App Nap on macOS >= 10.9" +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -62,6 +66,7 @@ files = [ name = "asgiref" version = "3.8.1" description = "ASGI specs, helper code, and adapters" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -79,6 +84,7 @@ tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"] name = "astroid" version = "3.1.0" description = "An abstract syntax tree for Python with inference support." 
+category = "dev" optional = false python-versions = ">=3.8.0" files = [ @@ -93,6 +99,7 @@ typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""} name = "asttokens" version = "2.4.1" description = "Annotate AST trees with source code positions" +category = "dev" optional = false python-versions = "*" files = [ @@ -111,6 +118,7 @@ test = ["astroid (>=1,<2)", "astroid (>=2,<4)", "pytest"] name = "astunparse" version = "1.6.3" description = "An AST unparser for Python" +category = "dev" optional = false python-versions = "*" files = [ @@ -126,6 +134,7 @@ wheel = ">=0.23.0,<1.0" name = "async-timeout" version = "4.0.3" description = "Timeout context manager for asyncio programs" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -137,6 +146,7 @@ files = [ name = "attrs" version = "23.2.0" description = "Classes Without Boilerplate" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -154,23 +164,25 @@ tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "p [[package]] name = "autopep8" -version = "2.0.0" +version = "2.1.0" description = "A tool that automatically formats Python code to conform to the PEP 8 style guide" +category = "main" optional = false -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "autopep8-2.0.0-py2.py3-none-any.whl", hash = "sha256:ad924b42c2e27a1ac58e432166cc4588f5b80747de02d0d35b1ecbd3e7d57207"}, - {file = "autopep8-2.0.0.tar.gz", hash = "sha256:8b1659c7f003e693199f52caffdc06585bb0716900bbc6a7442fd931d658c077"}, + {file = "autopep8-2.1.0-py2.py3-none-any.whl", hash = "sha256:2bb76888c5edbcafe6aabab3c47ba534f5a2c2d245c2eddced4a30c4b4946357"}, + {file = "autopep8-2.1.0.tar.gz", hash = "sha256:1fa8964e4618929488f4ec36795c7ff12924a68b8bf01366c094fc52f770b6e7"}, ] [package.dependencies] -pycodestyle = ">=2.9.1" -tomli = "*" +pycodestyle = ">=2.11.0" +tomli = {version = "*", markers = "python_version < \"3.11\""} [[package]] name = 
"backcall" version = "0.2.0" description = "Specifications for callback functions passed in to an API" +category = "dev" optional = false python-versions = "*" files = [ @@ -182,6 +194,7 @@ files = [ name = "backports-zoneinfo" version = "0.2.1" description = "Backport of the standard library zoneinfo module" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -213,6 +226,7 @@ tzdata = ["tzdata"] name = "bandit" version = "1.7.8" description = "Security oriented static analyser for python code." +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -237,6 +251,7 @@ yaml = ["PyYAML"] name = "bcrypt" version = "4.1.2" description = "Modern password hashing for your software and your servers" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -277,6 +292,7 @@ typecheck = ["mypy"] name = "billiard" version = "4.2.0" description = "Python multiprocessing fork with improvements and bugfixes" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -288,6 +304,7 @@ files = [ name = "black" version = "24.3.0" description = "The uncompromising code formatter." +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -334,6 +351,7 @@ uvloop = ["uvloop (>=0.15.2)"] name = "celery" version = "5.3.6" description = "Distributed Task Queue." +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -390,6 +408,7 @@ zstd = ["zstandard (==0.22.0)"] name = "certifi" version = "2024.2.2" description = "Python package for providing Mozilla's CA Bundle." +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -401,6 +420,7 @@ files = [ name = "cffi" version = "1.16.0" description = "Foreign Function Interface for Python calling C code." +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -465,6 +485,7 @@ pycparser = "*" name = "charset-normalizer" version = "3.3.2" description = "The Real First Universal Charset Detector. 
Open, modern and actively maintained alternative to Chardet." +category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -564,6 +585,7 @@ files = [ name = "click" version = "8.1.7" description = "Composable command line interface toolkit" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -578,6 +600,7 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""} name = "click-default-group" version = "1.2.4" description = "click_default_group" +category = "dev" optional = false python-versions = ">=2.7" files = [ @@ -595,6 +618,7 @@ test = ["pytest"] name = "click-didyoumean" version = "0.3.1" description = "Enables git-like *did-you-mean* feature in click" +category = "main" optional = false python-versions = ">=3.6.2" files = [ @@ -609,6 +633,7 @@ click = ">=7" name = "click-plugins" version = "1.1.1" description = "An extension module for click to enable registering CLI commands via setuptools entry-points." +category = "main" optional = false python-versions = "*" files = [ @@ -626,6 +651,7 @@ dev = ["coveralls", "pytest (>=3.6)", "pytest-cov", "wheel"] name = "click-repl" version = "0.3.0" description = "REPL plugin for Click" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -644,6 +670,7 @@ testing = ["pytest (>=7.2.1)", "pytest-cov (>=4.0.0)", "tox (>=4.4.3)"] name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." +category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ @@ -655,6 +682,7 @@ files = [ name = "coverage" version = "7.4.4" description = "Code coverage measurement for Python" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -719,6 +747,7 @@ toml = ["tomli"] name = "cron-descriptor" version = "1.4.3" description = "A Python library that converts cron expressions into human readable strings." 
+category = "main" optional = false python-versions = "*" files = [ @@ -733,6 +762,7 @@ dev = ["polib"] name = "cryptography" version = "42.0.5" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -787,6 +817,7 @@ test-randomorder = ["pytest-randomly"] name = "decorator" version = "5.1.1" description = "Decorators for Humans" +category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -798,6 +829,7 @@ files = [ name = "deepdiff" version = "6.7.1" description = "Deep Difference and Search of any Python object/data. Recreate objects by adding adding deltas to each other." +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -816,6 +848,7 @@ optimize = ["orjson"] name = "defusedxml" version = "0.7.1" description = "XML bomb protection for Python stdlib modules" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -827,6 +860,7 @@ files = [ name = "diffsync" version = "1.10.0" description = "Library to easily sync/diff/update 2 different data sources" +category = "main" optional = false python-versions = ">=3.8,<4.0" files = [ @@ -848,6 +882,7 @@ redis = ["redis (>=4.3,<5.0)"] name = "dill" version = "0.3.8" description = "serialize all of Python" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -863,6 +898,7 @@ profile = ["gprof2dot (>=2022.7.29)"] name = "django" version = "3.2.25" description = "A high-level Python Web framework that encourages rapid development and clean, pragmatic design." 
+category = "main" optional = false python-versions = ">=3.6" files = [ @@ -883,6 +919,7 @@ bcrypt = ["bcrypt"] name = "django-ajax-tables" version = "1.1.1" description = "Django tag for ajax-enabled tables" +category = "main" optional = false python-versions = "*" files = [ @@ -894,6 +931,7 @@ files = [ name = "django-celery-beat" version = "2.5.0" description = "Database-backed Periodic Tasks." +category = "main" optional = false python-versions = "*" files = [ @@ -914,6 +952,7 @@ tzdata = "*" name = "django-celery-results" version = "2.4.0" description = "Celery result backends for Django." +category = "main" optional = false python-versions = "*" files = [ @@ -928,6 +967,7 @@ celery = ">=5.2.3,<6.0" name = "django-constance" version = "2.9.1" description = "Django live settings with pluggable backends, including Redis." +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -946,6 +986,7 @@ redis = ["redis"] name = "django-cors-headers" version = "4.2.0" description = "django-cors-headers is a Django application for handling the server headers required for Cross-Origin Resource Sharing (CORS)." +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -960,6 +1001,7 @@ Django = ">=3.2" name = "django-db-file-storage" version = "0.5.6.1" description = "Custom FILE_STORAGE for Django. Saves files in your database instead of your file system." +category = "main" optional = false python-versions = "*" files = [ @@ -974,6 +1016,7 @@ Django = "*" name = "django-debug-toolbar" version = "4.3.0" description = "A configurable set of panels that display various debug information about the current request/response." 
+category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -989,6 +1032,7 @@ sqlparse = ">=0.2" name = "django-extensions" version = "3.2.3" description = "Extensions for Django" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1003,6 +1047,7 @@ Django = ">=3.2" name = "django-filter" version = "23.1" description = "Django-filter is a reusable Django application for allowing users to filter querysets dynamically." +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1017,6 +1062,7 @@ Django = ">=3.2" name = "django-health-check" version = "3.17.0" description = "Run checks on services like databases, queue servers, celery processes, etc." +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1035,6 +1081,7 @@ test = ["celery", "pytest", "pytest-cov", "pytest-django", "redis"] name = "django-jinja" version = "2.10.2" description = "Jinja2 templating language integrated in Django." +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1048,13 +1095,14 @@ jinja2 = ">=3" [[package]] name = "django-picklefield" -version = "3.1" +version = "3.2" description = "Pickled object field for Django" +category = "main" optional = false python-versions = ">=3" files = [ - {file = "django-picklefield-3.1.tar.gz", hash = "sha256:c786cbeda78d6def2b43bff4840d19787809c8909f7ad683961703060398d356"}, - {file = "django_picklefield-3.1-py3-none-any.whl", hash = "sha256:d77c504df7311e8ec14e8b779f10ca6fec74de6c7f8e2c136e1ef60cf955125d"}, + {file = "django-picklefield-3.2.tar.gz", hash = "sha256:aa463f5d79d497dbe789f14b45180f00a51d0d670067d0729f352a3941cdfa4d"}, + {file = "django_picklefield-3.2-py3-none-any.whl", hash = "sha256:e9a73539d110f69825d9320db18bcb82e5189ff48dbed41821c026a20497764c"}, ] [package.dependencies] @@ -1067,6 +1115,7 @@ tests = ["tox"] name = "django-prometheus" version = "2.3.1" description = "Django middlewares to monitor your application with Prometheus.io." 
+category = "main" optional = false python-versions = "*" files = [ @@ -1081,6 +1130,7 @@ prometheus-client = ">=0.7" name = "django-redis" version = "5.3.0" description = "Full featured redis cache backend for Django." +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1099,6 +1149,7 @@ hiredis = ["redis[hiredis] (>=3,!=4.0.0,!=4.0.1)"] name = "django-silk" version = "5.1.0" description = "Silky smooth profiling for the Django Framework" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1116,6 +1167,7 @@ sqlparse = "*" name = "django-tables2" version = "2.6.0" description = "Table/data-grid framework for Django" +category = "main" optional = false python-versions = "*" files = [ @@ -1133,6 +1185,7 @@ tablib = ["tablib"] name = "django-taggit" version = "4.0.0" description = "django-taggit is a reusable Django application for simple tagging." +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1147,6 +1200,7 @@ Django = ">=3.2" name = "django-timezone-field" version = "5.1" description = "A Django app providing DB, form, and REST framework fields for zoneinfo and pytz timezone objects." 
+category = "main" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -1156,13 +1210,14 @@ files = [ [package.dependencies] "backports.zoneinfo" = {version = ">=0.2.1,<0.3.0", markers = "python_version < \"3.9\""} -Django = ">=2.2,<3.0.dev0 || >=3.2.dev0,<5.0" +Django = ">=2.2,<3.0.0 || >=3.2.0,<5.0" pytz = "*" [[package]] name = "django-tree-queries" version = "0.16.1" description = "Tree queries with explicit opt-in, without configurability" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1177,6 +1232,7 @@ tests = ["coverage"] name = "django-webserver" version = "1.2.0" description = "Django management commands for production webservers" +category = "main" optional = false python-versions = "*" files = [ @@ -1198,6 +1254,7 @@ waitress = ["waitress"] name = "djangorestframework" version = "3.14.0" description = "Web APIs for Django, made easy." +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1213,6 +1270,7 @@ pytz = "*" name = "drf-react-template-framework" version = "0.0.17" description = "Django REST Framework plugin that creates form schemas for react-jsonschema-form" +category = "main" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -1227,6 +1285,7 @@ djangorestframework = ">=3.12.0,<4.0.0" name = "drf-spectacular" version = "0.26.3" description = "Sane and flexible OpenAPI 3 schema generation for Django REST framework" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1251,6 +1310,7 @@ sidecar = ["drf-spectacular-sidecar"] name = "drf-spectacular-sidecar" version = "2024.4.1" description = "Serve self-contained distribution builds of Swagger UI and Redoc with Django" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1265,6 +1325,7 @@ Django = ">=2.2" name = "emoji" version = "2.8.0" description = "Emoji for Python" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ 
-1279,6 +1340,7 @@ dev = ["coverage", "coveralls", "pytest"] name = "exceptiongroup" version = "1.2.0" description = "Backport of PEP 654 (exception groups)" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1293,6 +1355,7 @@ test = ["pytest (>=6)"] name = "executing" version = "2.0.1" description = "Get the currently executing AST node of a frame, and other information" +category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -1305,24 +1368,26 @@ tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipyth [[package]] name = "flake8" -version = "5.0.4" -description = "the modular source code checker: pep8 pyflakes and co" +version = "2.3.0" +description = "the modular source code checker: pep8, pyflakes and co" +category = "dev" optional = false -python-versions = ">=3.6.1" +python-versions = "*" files = [ - {file = "flake8-5.0.4-py2.py3-none-any.whl", hash = "sha256:7a1cf6b73744f5806ab95e526f6f0d8c01c66d7bbe349562d22dfca20610b248"}, - {file = "flake8-5.0.4.tar.gz", hash = "sha256:6fbe320aad8d6b95cec8b8e47bc933004678dc63095be98528b7bdd2a9f510db"}, + {file = "flake8-2.3.0-py2.py3-none-any.whl", hash = "sha256:c99cc9716d6655d9c8bcb1e77632b8615bf0abd282d7abd9f5c2148cad7fc669"}, + {file = "flake8-2.3.0.tar.gz", hash = "sha256:5ee1a43ccd0716d6061521eec6937c983efa027793013e572712c4da55c7c83e"}, ] [package.dependencies] -mccabe = ">=0.7.0,<0.8.0" -pycodestyle = ">=2.9.0,<2.10.0" -pyflakes = ">=2.5.0,<2.6.0" +mccabe = ">=0.2.1" +pep8 = ">=1.5.7" +pyflakes = ">=0.8.1" [[package]] name = "future" version = "1.0.0" description = "Clean single-source support for Python 3 and 2" +category = "main" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -1334,6 +1399,7 @@ files = [ name = "ghp-import" version = "2.1.0" description = "Copy your docs directly to the gh-pages branch." 
+category = "dev" optional = false python-versions = "*" files = [ @@ -1351,6 +1417,7 @@ dev = ["flake8", "markdown", "twine", "wheel"] name = "gitdb" version = "4.0.11" description = "Git Object Database" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1365,6 +1432,7 @@ smmap = ">=3.0.1,<6" name = "gitpython" version = "3.1.43" description = "GitPython is a Python library used to interact with Git repositories" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1383,6 +1451,7 @@ test = ["coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mock", "mypy", "pre-commit", name = "gprof2dot" version = "2022.7.29" description = "Generate a dot graph from the output of several profilers." +category = "main" optional = false python-versions = ">=2.7" files = [ @@ -1394,6 +1463,7 @@ files = [ name = "graphene" version = "2.1.9" description = "GraphQL Framework for Python" +category = "main" optional = false python-versions = "*" files = [ @@ -1416,6 +1486,7 @@ test = ["coveralls", "fastdiff (==0.2.0)", "iso8601", "mock", "promise", "pytest name = "graphene-django" version = "2.16.0" description = "Graphene Django integration" +category = "main" optional = false python-versions = "*" files = [ @@ -1440,6 +1511,7 @@ test = ["coveralls", "django-filter (>=2)", "djangorestframework (>=3.6.3)", "mo name = "graphene-django-optimizer" version = "0.8.0" description = "Optimize database access inside graphene queries." 
+category = "main" optional = false python-versions = "*" files = [ @@ -1450,6 +1522,7 @@ files = [ name = "graphql-core" version = "2.3.2" description = "GraphQL implementation for Python" +category = "main" optional = false python-versions = "*" files = [ @@ -1470,6 +1543,7 @@ test = ["coveralls (==1.11.1)", "cython (==0.29.17)", "gevent (==1.5.0)", "pyann name = "graphql-relay" version = "2.0.1" description = "Relay implementation for Python" +category = "main" optional = false python-versions = "*" files = [ @@ -1486,6 +1560,7 @@ six = ">=1.12" name = "griffe" version = "0.42.1" description = "Signatures for entire Python programs. Extract the structure, the frame, the skeleton of your project, to generate API documentation or find breaking changes in your API." +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1501,6 +1576,7 @@ colorama = ">=0.4" name = "h11" version = "0.14.0" description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1512,6 +1588,7 @@ files = [ name = "httpcore" version = "1.0.5" description = "A minimal low-level HTTP client." +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1526,13 +1603,14 @@ h11 = ">=0.13,<0.15" [package.extras] asyncio = ["anyio (>=4.0,<5.0)"] http2 = ["h2 (>=3,<5)"] -socks = ["socksio (==1.*)"] +socks = ["socksio (>=1.0.0,<2.0.0)"] trio = ["trio (>=0.22.0,<0.26.0)"] [[package]] name = "httpx" version = "0.27.0" description = "The next generation HTTP client." 
+category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1543,20 +1621,21 @@ files = [ [package.dependencies] anyio = "*" certifi = "*" -httpcore = "==1.*" +httpcore = ">=1.0.0,<2.0.0" idna = "*" sniffio = "*" [package.extras] brotli = ["brotli", "brotlicffi"] -cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] +cli = ["click (>=8.0.0,<9.0.0)", "pygments (>=2.0.0,<3.0.0)", "rich (>=10,<14)"] http2 = ["h2 (>=3,<5)"] -socks = ["socksio (==1.*)"] +socks = ["socksio (>=1.0.0,<2.0.0)"] [[package]] name = "idna" version = "3.6" description = "Internationalized Domain Names in Applications (IDNA)" +category = "main" optional = false python-versions = ">=3.5" files = [ @@ -1568,6 +1647,7 @@ files = [ name = "importlib-metadata" version = "4.13.0" description = "Read metadata from Python packages" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1587,6 +1667,7 @@ testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packag name = "importlib-resources" version = "6.4.0" description = "Read resources from Python packages" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1605,6 +1686,7 @@ testing = ["jaraco.test (>=5.4)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "p name = "incremental" version = "22.10.0" description = "\"A small library that versions your Python projects.\"" +category = "dev" optional = false python-versions = "*" files = [ @@ -1620,6 +1702,7 @@ scripts = ["click (>=6.0)", "twisted (>=16.4.0)"] name = "inflection" version = "0.5.1" description = "A port of Ruby on Rails inflector to Python" +category = "main" optional = false python-versions = ">=3.5" files = [ @@ -1631,6 +1714,7 @@ files = [ name = "invoke" version = "2.2.0" description = "Pythonic task execution" +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1642,6 +1726,7 @@ files = [ name = "ipython" version = "8.12.3" description = "IPython: Productive Interactive 
Computing" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1681,6 +1766,7 @@ test-extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.21)", "pa name = "isort" version = "5.13.2" description = "A Python utility / library to sort Python imports." +category = "dev" optional = false python-versions = ">=3.8.0" files = [ @@ -1695,6 +1781,7 @@ colors = ["colorama (>=0.4.6)"] name = "jdiff" version = "0.0.6" description = "A light-weight library to compare structured output from network devices show commands." +category = "main" optional = false python-versions = ">=3.8,<4.0" files = [ @@ -1710,6 +1797,7 @@ jmespath = ">=1.0.1,<2.0.0" name = "jedi" version = "0.19.1" description = "An autocompletion tool for Python that can be used for text editors." +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1729,6 +1817,7 @@ testing = ["Django", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] name = "jinja2" version = "3.1.3" description = "A very fast and expressive template engine." 
+category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1746,6 +1835,7 @@ i18n = ["Babel (>=2.7)"] name = "jmespath" version = "1.0.1" description = "JSON Matching Expressions" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1757,6 +1847,7 @@ files = [ name = "jsonschema" version = "4.18.6" description = "An implementation of JSON Schema validation for Python" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1780,6 +1871,7 @@ format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339- name = "jsonschema-specifications" version = "2023.12.1" description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1795,6 +1887,7 @@ referencing = ">=0.31.0" name = "junos-eznc" version = "2.7.0" description = "Junos 'EZ' automation for non-programmers" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1819,6 +1912,7 @@ yamlordereddictloader = "*" name = "kombu" version = "5.3.6" description = "Messaging library for Python." +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1851,124 +1945,167 @@ zookeeper = ["kazoo (>=2.8.0)"] [[package]] name = "lxml" -version = "5.2.0" +version = "5.2.1" description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." 
+category = "main" optional = false python-versions = ">=3.6" files = [ - {file = "lxml-5.2.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:c54f8d6160080831a76780d850302fdeb0e8d0806f661777b0714dfb55d9a08a"}, - {file = "lxml-5.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0e95ae029396382a0d2e8174e4077f96befcd4a2184678db363ddc074eb4d3b2"}, - {file = "lxml-5.2.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5810fa80e64a0c689262a71af999c5735f48c0da0affcbc9041d1ef5ef3920be"}, - {file = "lxml-5.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ae69524fd6a68b288574013f8fadac23cacf089c75cd3fc5b216277a445eb736"}, - {file = "lxml-5.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fadda215e32fe375d65e560b7f7e2a37c7f9c4ecee5315bb1225ca6ac9bf5838"}, - {file = "lxml-5.2.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:f1f164e4cc6bc646b1fc86664c3543bf4a941d45235797279b120dc740ee7af5"}, - {file = "lxml-5.2.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:3603a8a41097daf7672cae22cc4a860ab9ea5597f1c5371cb21beca3398b8d6a"}, - {file = "lxml-5.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b3b4bb89a785f4fd60e05f3c3a526c07d0d68e3536f17f169ca13bf5b5dd75a5"}, - {file = "lxml-5.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1effc10bf782f0696e76ecfeba0720ea02c0c31d5bffb7b29ba10debd57d1c3d"}, - {file = "lxml-5.2.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b03531f6cd6ce4b511dcece060ca20aa5412f8db449274b44f4003f282e6272f"}, - {file = "lxml-5.2.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7fac15090bb966719df06f0c4f8139783746d1e60e71016d8a65db2031ca41b8"}, - {file = "lxml-5.2.0-cp310-cp310-win32.whl", hash = "sha256:92bb37c96215c4b2eb26f3c791c0bf02c64dd251effa532b43ca5049000c4478"}, - {file = "lxml-5.2.0-cp310-cp310-win_amd64.whl", hash = 
"sha256:b0181c22fdb89cc19e70240a850e5480817c3e815b1eceb171b3d7a3aa3e596a"}, - {file = "lxml-5.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ada8ce9e6e1d126ef60d215baaa0c81381ba5841c25f1d00a71cdafdc038bd27"}, - {file = "lxml-5.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3cefb133c859f06dab2ae63885d9f405000c4031ec516e0ed4f9d779f690d8e3"}, - {file = "lxml-5.2.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1ede2a7a86a977b0c741654efaeca0af7860a9b1ae39f9268f0936246a977ee0"}, - {file = "lxml-5.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d46df6f0b1a0cda39d12c5c4615a7d92f40342deb8001c7b434d7c8c78352e58"}, - {file = "lxml-5.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc2259243ee734cc736e237719037efb86603c891fd363cc7973a2d0ac8a0e3f"}, - {file = "lxml-5.2.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:c53164f29ed3c3868787144e8ea8a399ffd7d8215f59500a20173593c19e96eb"}, - {file = "lxml-5.2.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:371aab9a397dcc76625ad3b02fa9b21be63406d69237b773156e7d1fc2ce0cae"}, - {file = "lxml-5.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e08784288a179b59115b5e57abf6d387528b39abb61105fe17510a199a277a40"}, - {file = "lxml-5.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4c232726f7b6df5143415a06323faaa998ef8abbe1c0ed00d718755231d76f08"}, - {file = "lxml-5.2.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e4366e58c0508da4dee4c7c70cee657e38553d73abdffa53abbd7d743711ee11"}, - {file = "lxml-5.2.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c84dce8fb2e900d4fb094e76fdad34a5fd06de53e41bddc1502c146eb11abd74"}, - {file = "lxml-5.2.0-cp311-cp311-win32.whl", hash = "sha256:0947d1114e337dc2aae2fa14bbc9ed5d9ca1a0acd6d2f948df9926aef65305e9"}, - {file = "lxml-5.2.0-cp311-cp311-win_amd64.whl", hash = 
"sha256:1eace37a9f4a1bef0bb5c849434933fd6213008ec583c8e31ee5b8e99c7c8500"}, - {file = "lxml-5.2.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f2cb157e279d28c66b1c27e0948687dc31dc47d1ab10ce0cd292a8334b7de3d5"}, - {file = "lxml-5.2.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:53c0e56f41ef68c1ce4e96f27ecdc2df389730391a2fd45439eb3facb02d36c8"}, - {file = "lxml-5.2.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:703d60e59ab45c17485c2c14b11880e4f7f0eab07134afa9007573fa5a779a5a"}, - {file = "lxml-5.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eaf5e308a5e50bc0548c4fdca0117a31ec9596f8cfc96592db170bcecc71a957"}, - {file = "lxml-5.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af64df85fecd3cf3b2e792f0b5b4d92740905adfa8ce3b24977a55415f1a0c40"}, - {file = "lxml-5.2.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:df7dfbdef11702fd22c2eaf042d7098d17edbc62d73f2199386ad06cbe466f6d"}, - {file = "lxml-5.2.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:7250030a7835bfd5ba6ca7d1ad483ec90f9cbc29978c5e75c1cc3e031d3c4160"}, - {file = "lxml-5.2.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:be5faa2d5c8c8294d770cfd09d119fb27b5589acc59635b0cf90f145dbe81dca"}, - {file = "lxml-5.2.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:347ec08250d5950f5b016caa3e2e13fb2cb9714fe6041d52e3716fb33c208663"}, - {file = "lxml-5.2.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:dc7b630c4fb428b8a40ddd0bfc4bc19de11bb3c9b031154f77360e48fe8b4451"}, - {file = "lxml-5.2.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ae550cbd7f229cdf2841d9b01406bcca379a5fb327b9efb53ba620a10452e835"}, - {file = "lxml-5.2.0-cp312-cp312-win32.whl", hash = "sha256:7c61ce3cdd6e6c9f4003ac118be7eb3036d0ce2afdf23929e533e54482780f74"}, - {file = "lxml-5.2.0-cp312-cp312-win_amd64.whl", hash = 
"sha256:f90c36ca95a44d2636bbf55a51ca30583b59b71b6547b88d954e029598043551"}, - {file = "lxml-5.2.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:1cce2eaad7e38b985b0f91f18468dda0d6b91862d32bec945b0e46e2ffe7222e"}, - {file = "lxml-5.2.0-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:60a3983d32f722a8422c01e4dc4badc7a307ca55c59e2485d0e14244a52c482f"}, - {file = "lxml-5.2.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:60847dfbdfddf08a56c4eefe48234e8c1ab756c7eda4a2a7c1042666a5516564"}, - {file = "lxml-5.2.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bbe335f0d1a86391671d975a1b5e9b08bb72fba6b567c43bdc2e55ca6e6c086"}, - {file = "lxml-5.2.0-cp36-cp36m-manylinux_2_28_aarch64.whl", hash = "sha256:3ac7c8a60b8ad51fe7bca99a634dd625d66492c502fd548dc6dc769ce7d94b6a"}, - {file = "lxml-5.2.0-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:73e69762cf740ac3ae81137ef9d6f15f93095f50854e233d50b29e7b8a91dbc6"}, - {file = "lxml-5.2.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:281ee1ffeb0ab06204dfcd22a90e9003f0bb2dab04101ad983d0b1773bc10588"}, - {file = "lxml-5.2.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:ba3a86b0d5a5c93104cb899dff291e3ae13729c389725a876d00ef9696de5425"}, - {file = "lxml-5.2.0-cp36-cp36m-musllinux_1_2_aarch64.whl", hash = "sha256:356f8873b1e27b81793e30144229adf70f6d3e36e5cb7b6d289da690f4398953"}, - {file = "lxml-5.2.0-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:2a34e74ffe92c413f197ff4967fb1611d938ee0691b762d062ef0f73814f3aa4"}, - {file = "lxml-5.2.0-cp36-cp36m-win32.whl", hash = "sha256:6f0d2b97a5a06c00c963d4542793f3e486b1ed3a957f8c19f6006ed39d104bb0"}, - {file = "lxml-5.2.0-cp36-cp36m-win_amd64.whl", hash = "sha256:35e39c6fd089ad6674eb52d93aa874d6027b3ae44d2381cca6e9e4c2e102c9c8"}, - {file = "lxml-5.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = 
"sha256:5f6e4e5a62114ae76690c4a04c5108d067442d0a41fd092e8abd25af1288c450"}, - {file = "lxml-5.2.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:93eede9bcc842f891b2267c7f0984d811940d1bc18472898a1187fe560907a99"}, - {file = "lxml-5.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ad364026c2cebacd7e01d1138bd53639822fefa8f7da90fc38cd0e6319a2699"}, - {file = "lxml-5.2.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f06e4460e76468d99cc36d5b9bc6fc5f43e6662af44960e13e3f4e040aacb35"}, - {file = "lxml-5.2.0-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:ca3236f31d565555139d5b00b790ed2a98ac6f0c4470c4032f8b5e5a5dba3c1a"}, - {file = "lxml-5.2.0-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:a9b67b850ab1d304cb706cf71814b0e0c3875287083d7ec55ee69504a9c48180"}, - {file = "lxml-5.2.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:5261c858c390ae9a19aba96796948b6a2d56649cbd572968970dc8da2b2b2a42"}, - {file = "lxml-5.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e8359fb610c8c444ac473cfd82dae465f405ff807cabb98a9b9712bbd0028751"}, - {file = "lxml-5.2.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:f9e27841cddfaebc4e3ffbe5dbdff42891051acf5befc9f5323944b2c61cef16"}, - {file = "lxml-5.2.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:641a8da145aca67671205f3e89bfec9815138cf2fe06653c909eab42e486d373"}, - {file = "lxml-5.2.0-cp37-cp37m-win32.whl", hash = "sha256:931a3a13e0f574abce8f3152b207938a54304ccf7a6fd7dff1fdb2f6691d08af"}, - {file = "lxml-5.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:246c93e2503c710cf02c7e9869dc0258223cbefe5e8f9ecded0ac0aa07fd2bf8"}, - {file = "lxml-5.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:11acfcdf5a38cf89c48662123a5d02ae0a7d99142c7ee14ad90de5c96a9b6f06"}, - {file = "lxml-5.2.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:200f70b5d95fc79eb9ed7f8c4888eef4e274b9bf380b829d3d52e9ed962e9231"}, - {file = "lxml-5.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba4d02aed47c25be6775a40d55c5774327fdedba79871b7c2485e80e45750cb2"}, - {file = "lxml-5.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e283b24c14361fe9e04026a1d06c924450415491b83089951d469509900d9f32"}, - {file = "lxml-5.2.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:03e3962d6ad13a862dacd5b3a3ea60b4d092a550f36465234b8639311fd60989"}, - {file = "lxml-5.2.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:6e45fd5213e5587a610b7e7c8c5319a77591ab21ead42df46bb342e21bc1418d"}, - {file = "lxml-5.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:27877732946843f4b6bfc56eb40d865653eef34ad2edeed16b015d5c29c248df"}, - {file = "lxml-5.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4d16b44ad0dd8c948129639e34c8d301ad87ebc852568ace6fe9a5ad9ce67ee1"}, - {file = "lxml-5.2.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:b8f842df9ba26135c5414e93214e04fe0af259bb4f96a32f756f89467f7f3b45"}, - {file = "lxml-5.2.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c74e77df9e36c8c91157853e6cd400f6f9ca7a803ba89981bfe3f3fc7e5651ef"}, - {file = "lxml-5.2.0-cp38-cp38-win32.whl", hash = "sha256:1459a998c10a99711ac532abe5cc24ba354e4396dafef741c7797f8830712d56"}, - {file = "lxml-5.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:a00f5931b7cccea775123c3c0a2513aee58afdad8728550cc970bff32280bdd2"}, - {file = "lxml-5.2.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ddda5ba8831f258ac7e6364be03cb27aa62f50c67fd94bc1c3b6247959cc0369"}, - {file = "lxml-5.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:56835b9e9a7767202fae06310c6b67478963e535fe185bed3bf9af5b18d2b67e"}, - {file = "lxml-5.2.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:25fef8794f0dc89f01bdd02df6a7fec4bcb2fbbe661d571e898167a83480185e"}, - {file = "lxml-5.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:32d44af078485c4da9a7ec460162392d49d996caf89516fa0b75ad0838047122"}, - {file = "lxml-5.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f354d62345acdf22aa3e171bd9723790324a66fafe61bfe3873b86724cf6daaa"}, - {file = "lxml-5.2.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:6a7e0935f05e1cf1a3aa1d49a87505773b04f128660eac2a24a5594ea6b1baa7"}, - {file = "lxml-5.2.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:75a4117b43694c72a0d89f6c18a28dc57407bde4650927d4ef5fd384bdf6dcc7"}, - {file = "lxml-5.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:57402d6cdd8a897ce21cf8d1ff36683583c17a16322a321184766c89a1980600"}, - {file = "lxml-5.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:56591e477bea531e5e1854f5dfb59309d5708669bc921562a35fd9ca5182bdcd"}, - {file = "lxml-5.2.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:7efbce96719aa275d49ad5357886845561328bf07e1d5ab998f4e3066c5ccf15"}, - {file = "lxml-5.2.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a3c39def0965e8fb5c8d50973e0c7b4ce429a2fa730f3f9068a7f4f9ce78410b"}, - {file = "lxml-5.2.0-cp39-cp39-win32.whl", hash = "sha256:5188f22c00381cb44283ecb28c8d85c2db4a3035774dd851876c8647cb809c27"}, - {file = "lxml-5.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:ed1fe80e1fcdd1205a443bddb1ad3c3135bb1cd3f36cc996a1f4aed35960fbe8"}, - {file = "lxml-5.2.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d2b339fb790fc923ae2e9345c8633e3d0064d37ea7920c027f20c8ae6f65a91f"}, - {file = "lxml-5.2.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:06036d60fccb21e22dd167f6d0e422b9cbdf3588a7e999a33799f9cbf01e41a5"}, - {file = "lxml-5.2.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:7a1611fb9de0a269c05575c024e6d8cdf2186e3fa52b364e3b03dcad82514d57"}, - {file = "lxml-5.2.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:05fc3720250d221792b6e0d150afc92d20cb10c9cdaa8c8f93c2a00fbdd16015"}, - {file = "lxml-5.2.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:11e41ffd3cd27b0ca1c76073b27bd860f96431d9b70f383990f1827ca19f2f52"}, - {file = "lxml-5.2.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:0382e6a3eefa3f6699b14fa77c2eb32af2ada261b75120eaf4fc028a20394975"}, - {file = "lxml-5.2.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:be5c8e776ecbcf8c1bce71a7d90e3a3680c9ceae516cac0be08b47e9fac0ca43"}, - {file = "lxml-5.2.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da12b4efc93d53068888cb3b58e355b31839f2428b8f13654bd25d68b201c240"}, - {file = "lxml-5.2.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f46f8033da364bacc74aca5e319509a20bb711c8a133680ca5f35020f9eaf025"}, - {file = "lxml-5.2.0-pp37-pypy37_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:50a26f68d090594477df8572babac64575cd5c07373f7a8319c527c8e56c0f99"}, - {file = "lxml-5.2.0-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:57cbadf028727705086047994d2e50124650e63ce5a035b0aa79ab50f001989f"}, - {file = "lxml-5.2.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:8aa11638902ac23f944f16ce45c9f04c9d5d57bb2da66822abb721f4efe5fdbb"}, - {file = "lxml-5.2.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b7150e630b879390e02121e71ceb1807f682b88342e2ea2082e2c8716cf8bd93"}, - {file = "lxml-5.2.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4add722393c99da4d51c8d9f3e1ddf435b30677f2d9ba9aeaa656f23c1b7b580"}, - {file = "lxml-5.2.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd0f25a431cd16f70ec1c47c10b413e7ddfe1ccaaddd1a7abd181e507c012374"}, - {file = 
"lxml-5.2.0-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:883e382695f346c2ea3ad96bdbdf4ca531788fbeedb4352be3a8fcd169fc387d"}, - {file = "lxml-5.2.0-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:80cc2b55bb6e35d3cb40936b658837eb131e9f16357241cd9ba106ae1e9c5ecb"}, - {file = "lxml-5.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:59ec2948385336e9901008fdf765780fe30f03e7fdba8090aafdbe5d1b7ea0cd"}, - {file = "lxml-5.2.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ddbea6e58cce1a640d9d65947f1e259423fc201c9cf9761782f355f53b7f3097"}, - {file = "lxml-5.2.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52d6cdea438eb7282c41c5ac00bd6d47d14bebb6e8a8d2a1c168ed9e0cacfbab"}, - {file = "lxml-5.2.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c556bbf88a8b667c849d326dd4dd9c6290ede5a33383ffc12b0ed17777f909d"}, - {file = "lxml-5.2.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:947fa8bf15d1c62c6db36c6ede9389cac54f59af27010251747f05bddc227745"}, - {file = "lxml-5.2.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e6cb8f7a332eaa2d876b649a748a445a38522e12f2168e5e838d1505a91cdbb7"}, - {file = "lxml-5.2.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:16e65223f34fd3d65259b174f0f75a4bb3d9893698e5e7d01e54cd8c5eb98d85"}, - {file = "lxml-5.2.0.tar.gz", hash = "sha256:21dc490cdb33047bc7f7ad76384f3366fa8f5146b86cc04c4af45de901393b90"}, + {file = "lxml-5.2.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1f7785f4f789fdb522729ae465adcaa099e2a3441519df750ebdccc481d961a1"}, + {file = "lxml-5.2.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6cc6ee342fb7fa2471bd9b6d6fdfc78925a697bf5c2bcd0a302e98b0d35bfad3"}, + {file = "lxml-5.2.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:794f04eec78f1d0e35d9e0c36cbbb22e42d370dda1609fb03bcd7aeb458c6377"}, + {file = 
"lxml-5.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c817d420c60a5183953c783b0547d9eb43b7b344a2c46f69513d5952a78cddf3"}, + {file = "lxml-5.2.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2213afee476546a7f37c7a9b4ad4d74b1e112a6fafffc9185d6d21f043128c81"}, + {file = "lxml-5.2.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b070bbe8d3f0f6147689bed981d19bbb33070225373338df755a46893528104a"}, + {file = "lxml-5.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e02c5175f63effbd7c5e590399c118d5db6183bbfe8e0d118bdb5c2d1b48d937"}, + {file = "lxml-5.2.1-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:3dc773b2861b37b41a6136e0b72a1a44689a9c4c101e0cddb6b854016acc0aa8"}, + {file = "lxml-5.2.1-cp310-cp310-manylinux_2_28_ppc64le.whl", hash = "sha256:d7520db34088c96cc0e0a3ad51a4fd5b401f279ee112aa2b7f8f976d8582606d"}, + {file = "lxml-5.2.1-cp310-cp310-manylinux_2_28_s390x.whl", hash = "sha256:bcbf4af004f98793a95355980764b3d80d47117678118a44a80b721c9913436a"}, + {file = "lxml-5.2.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:a2b44bec7adf3e9305ce6cbfa47a4395667e744097faed97abb4728748ba7d47"}, + {file = "lxml-5.2.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:1c5bb205e9212d0ebddf946bc07e73fa245c864a5f90f341d11ce7b0b854475d"}, + {file = "lxml-5.2.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2c9d147f754b1b0e723e6afb7ba1566ecb162fe4ea657f53d2139bbf894d050a"}, + {file = "lxml-5.2.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:3545039fa4779be2df51d6395e91a810f57122290864918b172d5dc7ca5bb433"}, + {file = "lxml-5.2.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a91481dbcddf1736c98a80b122afa0f7296eeb80b72344d7f45dc9f781551f56"}, + {file = "lxml-5.2.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:2ddfe41ddc81f29a4c44c8ce239eda5ade4e7fc305fb7311759dd6229a080052"}, + {file = 
"lxml-5.2.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:a7baf9ffc238e4bf401299f50e971a45bfcc10a785522541a6e3179c83eabf0a"}, + {file = "lxml-5.2.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:31e9a882013c2f6bd2f2c974241bf4ba68c85eba943648ce88936d23209a2e01"}, + {file = "lxml-5.2.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:0a15438253b34e6362b2dc41475e7f80de76320f335e70c5528b7148cac253a1"}, + {file = "lxml-5.2.1-cp310-cp310-win32.whl", hash = "sha256:6992030d43b916407c9aa52e9673612ff39a575523c5f4cf72cdef75365709a5"}, + {file = "lxml-5.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:da052e7962ea2d5e5ef5bc0355d55007407087392cf465b7ad84ce5f3e25fe0f"}, + {file = "lxml-5.2.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:70ac664a48aa64e5e635ae5566f5227f2ab7f66a3990d67566d9907edcbbf867"}, + {file = "lxml-5.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1ae67b4e737cddc96c99461d2f75d218bdf7a0c3d3ad5604d1f5e7464a2f9ffe"}, + {file = "lxml-5.2.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f18a5a84e16886898e51ab4b1d43acb3083c39b14c8caeb3589aabff0ee0b270"}, + {file = "lxml-5.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6f2c8372b98208ce609c9e1d707f6918cc118fea4e2c754c9f0812c04ca116d"}, + {file = "lxml-5.2.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:394ed3924d7a01b5bd9a0d9d946136e1c2f7b3dc337196d99e61740ed4bc6fe1"}, + {file = "lxml-5.2.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d077bc40a1fe984e1a9931e801e42959a1e6598edc8a3223b061d30fbd26bbc"}, + {file = "lxml-5.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:764b521b75701f60683500d8621841bec41a65eb739b8466000c6fdbc256c240"}, + {file = "lxml-5.2.1-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:3a6b45da02336895da82b9d472cd274b22dc27a5cea1d4b793874eead23dd14f"}, + {file = 
"lxml-5.2.1-cp311-cp311-manylinux_2_28_ppc64le.whl", hash = "sha256:5ea7b6766ac2dfe4bcac8b8595107665a18ef01f8c8343f00710b85096d1b53a"}, + {file = "lxml-5.2.1-cp311-cp311-manylinux_2_28_s390x.whl", hash = "sha256:e196a4ff48310ba62e53a8e0f97ca2bca83cdd2fe2934d8b5cb0df0a841b193a"}, + {file = "lxml-5.2.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:200e63525948e325d6a13a76ba2911f927ad399ef64f57898cf7c74e69b71095"}, + {file = "lxml-5.2.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:dae0ed02f6b075426accbf6b2863c3d0a7eacc1b41fb40f2251d931e50188dad"}, + {file = "lxml-5.2.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:ab31a88a651039a07a3ae327d68ebdd8bc589b16938c09ef3f32a4b809dc96ef"}, + {file = "lxml-5.2.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:df2e6f546c4df14bc81f9498bbc007fbb87669f1bb707c6138878c46b06f6510"}, + {file = "lxml-5.2.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5dd1537e7cc06efd81371f5d1a992bd5ab156b2b4f88834ca852de4a8ea523fa"}, + {file = "lxml-5.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9b9ec9c9978b708d488bec36b9e4c94d88fd12ccac3e62134a9d17ddba910ea9"}, + {file = "lxml-5.2.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:8e77c69d5892cb5ba71703c4057091e31ccf534bd7f129307a4d084d90d014b8"}, + {file = "lxml-5.2.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:a8d5c70e04aac1eda5c829a26d1f75c6e5286c74743133d9f742cda8e53b9c2f"}, + {file = "lxml-5.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c94e75445b00319c1fad60f3c98b09cd63fe1134a8a953dcd48989ef42318534"}, + {file = "lxml-5.2.1-cp311-cp311-win32.whl", hash = "sha256:4951e4f7a5680a2db62f7f4ab2f84617674d36d2d76a729b9a8be4b59b3659be"}, + {file = "lxml-5.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:5c670c0406bdc845b474b680b9a5456c561c65cf366f8db5a60154088c92d102"}, + {file = "lxml-5.2.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:abc25c3cab9ec7fcd299b9bcb3b8d4a1231877e425c650fa1c7576c5107ab851"}, + {file = 
"lxml-5.2.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6935bbf153f9a965f1e07c2649c0849d29832487c52bb4a5c5066031d8b44fd5"}, + {file = "lxml-5.2.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d793bebb202a6000390a5390078e945bbb49855c29c7e4d56a85901326c3b5d9"}, + {file = "lxml-5.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afd5562927cdef7c4f5550374acbc117fd4ecc05b5007bdfa57cc5355864e0a4"}, + {file = "lxml-5.2.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0e7259016bc4345a31af861fdce942b77c99049d6c2107ca07dc2bba2435c1d9"}, + {file = "lxml-5.2.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:530e7c04f72002d2f334d5257c8a51bf409db0316feee7c87e4385043be136af"}, + {file = "lxml-5.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59689a75ba8d7ffca577aefd017d08d659d86ad4585ccc73e43edbfc7476781a"}, + {file = "lxml-5.2.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:f9737bf36262046213a28e789cc82d82c6ef19c85a0cf05e75c670a33342ac2c"}, + {file = "lxml-5.2.1-cp312-cp312-manylinux_2_28_ppc64le.whl", hash = "sha256:3a74c4f27167cb95c1d4af1c0b59e88b7f3e0182138db2501c353555f7ec57f4"}, + {file = "lxml-5.2.1-cp312-cp312-manylinux_2_28_s390x.whl", hash = "sha256:68a2610dbe138fa8c5826b3f6d98a7cfc29707b850ddcc3e21910a6fe51f6ca0"}, + {file = "lxml-5.2.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:f0a1bc63a465b6d72569a9bba9f2ef0334c4e03958e043da1920299100bc7c08"}, + {file = "lxml-5.2.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c2d35a1d047efd68027817b32ab1586c1169e60ca02c65d428ae815b593e65d4"}, + {file = "lxml-5.2.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:79bd05260359170f78b181b59ce871673ed01ba048deef4bf49a36ab3e72e80b"}, + {file = "lxml-5.2.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = 
"sha256:865bad62df277c04beed9478fe665b9ef63eb28fe026d5dedcb89b537d2e2ea6"}, + {file = "lxml-5.2.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:44f6c7caff88d988db017b9b0e4ab04934f11e3e72d478031efc7edcac6c622f"}, + {file = "lxml-5.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:71e97313406ccf55d32cc98a533ee05c61e15d11b99215b237346171c179c0b0"}, + {file = "lxml-5.2.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:057cdc6b86ab732cf361f8b4d8af87cf195a1f6dc5b0ff3de2dced242c2015e0"}, + {file = "lxml-5.2.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:f3bbbc998d42f8e561f347e798b85513ba4da324c2b3f9b7969e9c45b10f6169"}, + {file = "lxml-5.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:491755202eb21a5e350dae00c6d9a17247769c64dcf62d8c788b5c135e179dc4"}, + {file = "lxml-5.2.1-cp312-cp312-win32.whl", hash = "sha256:8de8f9d6caa7f25b204fc861718815d41cbcf27ee8f028c89c882a0cf4ae4134"}, + {file = "lxml-5.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:f2a9efc53d5b714b8df2b4b3e992accf8ce5bbdfe544d74d5c6766c9e1146a3a"}, + {file = "lxml-5.2.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:70a9768e1b9d79edca17890175ba915654ee1725975d69ab64813dd785a2bd5c"}, + {file = "lxml-5.2.1-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c38d7b9a690b090de999835f0443d8aa93ce5f2064035dfc48f27f02b4afc3d0"}, + {file = "lxml-5.2.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5670fb70a828663cc37552a2a85bf2ac38475572b0e9b91283dc09efb52c41d1"}, + {file = "lxml-5.2.1-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:958244ad566c3ffc385f47dddde4145088a0ab893504b54b52c041987a8c1863"}, + {file = "lxml-5.2.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:b6241d4eee5f89453307c2f2bfa03b50362052ca0af1efecf9fef9a41a22bb4f"}, + {file = "lxml-5.2.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = 
"sha256:2a66bf12fbd4666dd023b6f51223aed3d9f3b40fef06ce404cb75bafd3d89536"}, + {file = "lxml-5.2.1-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:9123716666e25b7b71c4e1789ec829ed18663152008b58544d95b008ed9e21e9"}, + {file = "lxml-5.2.1-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:0c3f67e2aeda739d1cc0b1102c9a9129f7dc83901226cc24dd72ba275ced4218"}, + {file = "lxml-5.2.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:5d5792e9b3fb8d16a19f46aa8208987cfeafe082363ee2745ea8b643d9cc5b45"}, + {file = "lxml-5.2.1-cp36-cp36m-musllinux_1_2_aarch64.whl", hash = "sha256:88e22fc0a6684337d25c994381ed8a1580a6f5ebebd5ad41f89f663ff4ec2885"}, + {file = "lxml-5.2.1-cp36-cp36m-musllinux_1_2_ppc64le.whl", hash = "sha256:21c2e6b09565ba5b45ae161b438e033a86ad1736b8c838c766146eff8ceffff9"}, + {file = "lxml-5.2.1-cp36-cp36m-musllinux_1_2_s390x.whl", hash = "sha256:afbbdb120d1e78d2ba8064a68058001b871154cc57787031b645c9142b937a62"}, + {file = "lxml-5.2.1-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:627402ad8dea044dde2eccde4370560a2b750ef894c9578e1d4f8ffd54000461"}, + {file = "lxml-5.2.1-cp36-cp36m-win32.whl", hash = "sha256:e89580a581bf478d8dcb97d9cd011d567768e8bc4095f8557b21c4d4c5fea7d0"}, + {file = "lxml-5.2.1-cp36-cp36m-win_amd64.whl", hash = "sha256:59565f10607c244bc4c05c0c5fa0c190c990996e0c719d05deec7030c2aa8289"}, + {file = "lxml-5.2.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:857500f88b17a6479202ff5fe5f580fc3404922cd02ab3716197adf1ef628029"}, + {file = "lxml-5.2.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:56c22432809085b3f3ae04e6e7bdd36883d7258fcd90e53ba7b2e463efc7a6af"}, + {file = "lxml-5.2.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a55ee573116ba208932e2d1a037cc4b10d2c1cb264ced2184d00b18ce585b2c0"}, + {file = "lxml-5.2.1-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:6cf58416653c5901e12624e4013708b6e11142956e7f35e7a83f1ab02f3fe456"}, + {file = 
"lxml-5.2.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:64c2baa7774bc22dd4474248ba16fe1a7f611c13ac6123408694d4cc93d66dbd"}, + {file = "lxml-5.2.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:74b28c6334cca4dd704e8004cba1955af0b778cf449142e581e404bd211fb619"}, + {file = "lxml-5.2.1-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:7221d49259aa1e5a8f00d3d28b1e0b76031655ca74bb287123ef56c3db92f213"}, + {file = "lxml-5.2.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:3dbe858ee582cbb2c6294dc85f55b5f19c918c2597855e950f34b660f1a5ede6"}, + {file = "lxml-5.2.1-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:04ab5415bf6c86e0518d57240a96c4d1fcfc3cb370bb2ac2a732b67f579e5a04"}, + {file = "lxml-5.2.1-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:6ab833e4735a7e5533711a6ea2df26459b96f9eec36d23f74cafe03631647c41"}, + {file = "lxml-5.2.1-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:f443cdef978430887ed55112b491f670bba6462cea7a7742ff8f14b7abb98d75"}, + {file = "lxml-5.2.1-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:9e2addd2d1866fe112bc6f80117bcc6bc25191c5ed1bfbcf9f1386a884252ae8"}, + {file = "lxml-5.2.1-cp37-cp37m-win32.whl", hash = "sha256:f51969bac61441fd31f028d7b3b45962f3ecebf691a510495e5d2cd8c8092dbd"}, + {file = "lxml-5.2.1-cp37-cp37m-win_amd64.whl", hash = "sha256:b0b58fbfa1bf7367dde8a557994e3b1637294be6cf2169810375caf8571a085c"}, + {file = "lxml-5.2.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:3e183c6e3298a2ed5af9d7a356ea823bccaab4ec2349dc9ed83999fd289d14d5"}, + {file = "lxml-5.2.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:804f74efe22b6a227306dd890eecc4f8c59ff25ca35f1f14e7482bbce96ef10b"}, + {file = "lxml-5.2.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:08802f0c56ed150cc6885ae0788a321b73505d2263ee56dad84d200cab11c07a"}, + {file = "lxml-5.2.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:0f8c09ed18ecb4ebf23e02b8e7a22a05d6411911e6fabef3a36e4f371f4f2585"}, + {file = "lxml-5.2.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3d30321949861404323c50aebeb1943461a67cd51d4200ab02babc58bd06a86"}, + {file = "lxml-5.2.1-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:b560e3aa4b1d49e0e6c847d72665384db35b2f5d45f8e6a5c0072e0283430533"}, + {file = "lxml-5.2.1-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:058a1308914f20784c9f4674036527e7c04f7be6fb60f5d61353545aa7fcb739"}, + {file = "lxml-5.2.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:adfb84ca6b87e06bc6b146dc7da7623395db1e31621c4785ad0658c5028b37d7"}, + {file = "lxml-5.2.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:417d14450f06d51f363e41cace6488519038f940676ce9664b34ebf5653433a5"}, + {file = "lxml-5.2.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a2dfe7e2473f9b59496247aad6e23b405ddf2e12ef0765677b0081c02d6c2c0b"}, + {file = "lxml-5.2.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bf2e2458345d9bffb0d9ec16557d8858c9c88d2d11fed53998512504cd9df49b"}, + {file = "lxml-5.2.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:58278b29cb89f3e43ff3e0c756abbd1518f3ee6adad9e35b51fb101c1c1daaec"}, + {file = "lxml-5.2.1-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:64641a6068a16201366476731301441ce93457eb8452056f570133a6ceb15fca"}, + {file = "lxml-5.2.1-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:78bfa756eab503673991bdcf464917ef7845a964903d3302c5f68417ecdc948c"}, + {file = "lxml-5.2.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:11a04306fcba10cd9637e669fd73aa274c1c09ca64af79c041aa820ea992b637"}, + {file = "lxml-5.2.1-cp38-cp38-win32.whl", hash = "sha256:66bc5eb8a323ed9894f8fa0ee6cb3e3fb2403d99aee635078fd19a8bc7a5a5da"}, + {file = "lxml-5.2.1-cp38-cp38-win_amd64.whl", hash = "sha256:9676bfc686fa6a3fa10cd4ae6b76cae8be26eb5ec6811d2a325636c460da1806"}, + {file = "lxml-5.2.1-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:cf22b41fdae514ee2f1691b6c3cdeae666d8b7fa9434de445f12bbeee0cf48dd"}, + {file = "lxml-5.2.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ec42088248c596dbd61d4ae8a5b004f97a4d91a9fd286f632e42e60b706718d7"}, + {file = "lxml-5.2.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cd53553ddad4a9c2f1f022756ae64abe16da1feb497edf4d9f87f99ec7cf86bd"}, + {file = "lxml-5.2.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:feaa45c0eae424d3e90d78823f3828e7dc42a42f21ed420db98da2c4ecf0a2cb"}, + {file = "lxml-5.2.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ddc678fb4c7e30cf830a2b5a8d869538bc55b28d6c68544d09c7d0d8f17694dc"}, + {file = "lxml-5.2.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:853e074d4931dbcba7480d4dcab23d5c56bd9607f92825ab80ee2bd916edea53"}, + {file = "lxml-5.2.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc4691d60512798304acb9207987e7b2b7c44627ea88b9d77489bbe3e6cc3bd4"}, + {file = "lxml-5.2.1-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:beb72935a941965c52990f3a32d7f07ce869fe21c6af8b34bf6a277b33a345d3"}, + {file = "lxml-5.2.1-cp39-cp39-manylinux_2_28_ppc64le.whl", hash = "sha256:6588c459c5627fefa30139be4d2e28a2c2a1d0d1c265aad2ba1935a7863a4913"}, + {file = "lxml-5.2.1-cp39-cp39-manylinux_2_28_s390x.whl", hash = "sha256:588008b8497667f1ddca7c99f2f85ce8511f8f7871b4a06ceede68ab62dff64b"}, + {file = "lxml-5.2.1-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:b6787b643356111dfd4032b5bffe26d2f8331556ecb79e15dacb9275da02866e"}, + {file = "lxml-5.2.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7c17b64b0a6ef4e5affae6a3724010a7a66bda48a62cfe0674dabd46642e8b54"}, + {file = "lxml-5.2.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:27aa20d45c2e0b8cd05da6d4759649170e8dfc4f4e5ef33a34d06f2d79075d57"}, + {file = "lxml-5.2.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = 
"sha256:d4f2cc7060dc3646632d7f15fe68e2fa98f58e35dd5666cd525f3b35d3fed7f8"}, + {file = "lxml-5.2.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff46d772d5f6f73564979cd77a4fffe55c916a05f3cb70e7c9c0590059fb29ef"}, + {file = "lxml-5.2.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:96323338e6c14e958d775700ec8a88346014a85e5de73ac7967db0367582049b"}, + {file = "lxml-5.2.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:52421b41ac99e9d91934e4d0d0fe7da9f02bfa7536bb4431b4c05c906c8c6919"}, + {file = "lxml-5.2.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:7a7efd5b6d3e30d81ec68ab8a88252d7c7c6f13aaa875009fe3097eb4e30b84c"}, + {file = "lxml-5.2.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:0ed777c1e8c99b63037b91f9d73a6aad20fd035d77ac84afcc205225f8f41188"}, + {file = "lxml-5.2.1-cp39-cp39-win32.whl", hash = "sha256:644df54d729ef810dcd0f7732e50e5ad1bd0a135278ed8d6bcb06f33b6b6f708"}, + {file = "lxml-5.2.1-cp39-cp39-win_amd64.whl", hash = "sha256:9ca66b8e90daca431b7ca1408cae085d025326570e57749695d6a01454790e95"}, + {file = "lxml-5.2.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9b0ff53900566bc6325ecde9181d89afadc59c5ffa39bddf084aaedfe3b06a11"}, + {file = "lxml-5.2.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd6037392f2d57793ab98d9e26798f44b8b4da2f2464388588f48ac52c489ea1"}, + {file = "lxml-5.2.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b9c07e7a45bb64e21df4b6aa623cb8ba214dfb47d2027d90eac197329bb5e94"}, + {file = "lxml-5.2.1-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:3249cc2989d9090eeac5467e50e9ec2d40704fea9ab72f36b034ea34ee65ca98"}, + {file = "lxml-5.2.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f42038016852ae51b4088b2862126535cc4fc85802bfe30dea3500fdfaf1864e"}, + {file = "lxml-5.2.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:533658f8fbf056b70e434dff7e7aa611bcacb33e01f75de7f821810e48d1bb66"}, + {file = 
"lxml-5.2.1-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:622020d4521e22fb371e15f580d153134bfb68d6a429d1342a25f051ec72df1c"}, + {file = "lxml-5.2.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:efa7b51824aa0ee957ccd5a741c73e6851de55f40d807f08069eb4c5a26b2baa"}, + {file = "lxml-5.2.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c6ad0fbf105f6bcc9300c00010a2ffa44ea6f555df1a2ad95c88f5656104817"}, + {file = "lxml-5.2.1-pp37-pypy37_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e233db59c8f76630c512ab4a4daf5a5986da5c3d5b44b8e9fc742f2a24dbd460"}, + {file = "lxml-5.2.1-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:6a014510830df1475176466b6087fc0c08b47a36714823e58d8b8d7709132a96"}, + {file = "lxml-5.2.1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:d38c8f50ecf57f0463399569aa388b232cf1a2ffb8f0a9a5412d0db57e054860"}, + {file = "lxml-5.2.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:5aea8212fb823e006b995c4dda533edcf98a893d941f173f6c9506126188860d"}, + {file = "lxml-5.2.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ff097ae562e637409b429a7ac958a20aab237a0378c42dabaa1e3abf2f896e5f"}, + {file = "lxml-5.2.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f5d65c39f16717a47c36c756af0fb36144069c4718824b7533f803ecdf91138"}, + {file = "lxml-5.2.1-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:3d0c3dd24bb4605439bf91068598d00c6370684f8de4a67c2992683f6c309d6b"}, + {file = "lxml-5.2.1-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e32be23d538753a8adb6c85bd539f5fd3b15cb987404327c569dfc5fd8366e85"}, + {file = "lxml-5.2.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:cc518cea79fd1e2f6c90baafa28906d4309d24f3a63e801d855e7424c5b34144"}, + {file = "lxml-5.2.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a0af35bd8ebf84888373630f73f24e86bf016642fb8576fba49d3d6b560b7cbc"}, + 
{file = "lxml-5.2.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8aca2e3a72f37bfc7b14ba96d4056244001ddcc18382bd0daa087fd2e68a354"}, + {file = "lxml-5.2.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ca1e8188b26a819387b29c3895c47a5e618708fe6f787f3b1a471de2c4a94d9"}, + {file = "lxml-5.2.1-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c8ba129e6d3b0136a0f50345b2cb3db53f6bda5dd8c7f5d83fbccba97fb5dcb5"}, + {file = "lxml-5.2.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e998e304036198b4f6914e6a1e2b6f925208a20e2042563d9734881150c6c246"}, + {file = "lxml-5.2.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:d3be9b2076112e51b323bdf6d5a7f8a798de55fb8d95fcb64bd179460cdc0704"}, + {file = "lxml-5.2.1.tar.gz", hash = "sha256:3f7765e69bbce0906a7c74d5fe46d2c7a7596147318dbc08e4a2431f3060e306"}, ] [package.extras] @@ -1982,6 +2119,7 @@ source = ["Cython (>=3.0.10)"] name = "markdown" version = "3.5.2" description = "Python implementation of John Gruber's Markdown." +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2000,6 +2138,7 @@ testing = ["coverage", "pyyaml"] name = "markdown-it-py" version = "3.0.0" description = "Python port of markdown-it. Markdown parsing, done right!" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2024,6 +2163,7 @@ testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] name = "markupsafe" version = "2.1.5" description = "Safely add untrusted strings to HTML/XML markup." 
+category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2093,6 +2233,7 @@ files = [ name = "matplotlib-inline" version = "0.1.6" description = "Inline Matplotlib backend for Jupyter" +category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -2107,6 +2248,7 @@ traitlets = "*" name = "mccabe" version = "0.7.0" description = "McCabe checker, plugin for flake8" +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2118,6 +2260,7 @@ files = [ name = "mdurl" version = "0.1.2" description = "Markdown URL utilities" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2129,6 +2272,7 @@ files = [ name = "mergedeep" version = "1.3.4" description = "A deep merge function for 🐍." +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2140,6 +2284,7 @@ files = [ name = "mkdocs" version = "1.5.2" description = "Project documentation with Markdown." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2171,6 +2316,7 @@ min-versions = ["babel (==2.9.0)", "click (==7.0)", "colorama (==0.4)", "ghp-imp name = "mkdocs-autorefs" version = "1.0.1" description = "Automatically link across pages in MkDocs." +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2187,6 +2333,7 @@ mkdocs = ">=1.1" name = "mkdocs-material" version = "9.1.15" description = "Documentation that simply works" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2209,6 +2356,7 @@ requests = ">=2.26" name = "mkdocs-material-extensions" version = "1.3.1" description = "Extension pack for Python Markdown and MkDocs Material." 
+category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2220,6 +2368,7 @@ files = [ name = "mkdocs-version-annotations" version = "1.0.0" description = "MkDocs plugin to add custom admonitions for documenting version differences" +category = "dev" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -2231,6 +2380,7 @@ files = [ name = "mkdocstrings" version = "0.22.0" description = "Automatic documentation from sources, for MkDocs." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2257,6 +2407,7 @@ python-legacy = ["mkdocstrings-python-legacy (>=0.2.1)"] name = "mkdocstrings-python" version = "1.5.2" description = "A Python handler for mkdocstrings." +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2272,6 +2423,7 @@ mkdocstrings = ">=0.20" name = "mypy-extensions" version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." +category = "main" optional = false python-versions = ">=3.5" files = [ @@ -2283,6 +2435,7 @@ files = [ name = "napalm" version = "4.1.0" description = "Network Automation and Programmability Abstraction Layer with Multivendor support" +category = "main" optional = false python-versions = "*" files = [ @@ -2315,6 +2468,7 @@ typing-extensions = ">=4.3.0" name = "nautobot" version = "2.2.0" description = "Source of truth and network automation platform." +category = "main" optional = false python-versions = "<3.12,>=3.8" files = [ @@ -2379,6 +2533,7 @@ sso = ["social-auth-core[openidconnect,saml] (>=4.5.3,<4.6.0)"] name = "nautobot-plugin-nornir" version = "2.0.0" description = "Nautobot Nornir plugin." 
+category = "main" optional = false python-versions = ">=3.8,<3.12" files = [ @@ -2397,6 +2552,7 @@ nautobot = ["nautobot (>=2.0.0,<3.0.0)"] name = "nautobot-ssot" version = "2.5.0" description = "Nautobot Single Source of Truth" +category = "main" optional = false python-versions = "<3.12,>=3.8" files = [ @@ -2427,6 +2583,7 @@ servicenow = ["Jinja2 (>=2.11.3)", "PyYAML (>=6)", "ijson (>=2.5.1)", "oauthlib name = "ncclient" version = "0.6.15" description = "Python library for NETCONF clients" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -2443,6 +2600,7 @@ six = "*" name = "netaddr" version = "0.8.0" description = "A network address manipulation library for Python" +category = "main" optional = false python-versions = "*" files = [ @@ -2454,6 +2612,7 @@ files = [ name = "netmiko" version = "4.3.0" description = "Multi-vendor library to simplify legacy CLI connections to network devices" +category = "main" optional = false python-versions = ">=3.8,<4.0" files = [ @@ -2471,13 +2630,14 @@ textfsm = ">=1.1.3" [[package]] name = "netutils" -version = "1.7.0" +version = "1.8.0" description = "Common helper functions useful in network automation." 
+category = "main" optional = false -python-versions = ">=3.8,<4.0" +python-versions = "<4.0,>=3.8" files = [ - {file = "netutils-1.7.0-py3-none-any.whl", hash = "sha256:ad2e65d2e5bb7cf857faeee96f03b8823782c509cb003f2e4e86cccf5b0a3328"}, - {file = "netutils-1.7.0.tar.gz", hash = "sha256:e0f461092e02c03166a6830706377dfe079b661ad9e41940f265424121621dc8"}, + {file = "netutils-1.8.0-py3-none-any.whl", hash = "sha256:5e705793528d8e771edae6648b15c9f9a7c3cfc9c749299f6ff4a35454545858"}, + {file = "netutils-1.8.0.tar.gz", hash = "sha256:d5e0205c2e8f095314cf755f4dbda956db42a97502501824c6c4764726eda93f"}, ] [package.extras] @@ -2487,6 +2647,7 @@ optionals = ["jsonschema (>=4.17.3,<5.0.0)", "napalm (>=4.0.0,<5.0.0)"] name = "nh3" version = "0.2.17" description = "Python bindings to the ammonia HTML sanitization library." +category = "main" optional = false python-versions = "*" files = [ @@ -2512,6 +2673,7 @@ files = [ name = "nornir" version = "3.4.1" description = "Pluggable multi-threaded framework with inventory management to help operate collections of devices" +category = "main" optional = false python-versions = ">=3.8,<4.0" files = [ @@ -2528,6 +2690,7 @@ mypy_extensions = ">=1.0.0,<2.0.0" name = "nornir-jinja2" version = "0.2.0" description = "Jinja2 plugins for nornir" +category = "main" optional = false python-versions = ">=3.6,<4.0" files = [ @@ -2543,6 +2706,7 @@ nornir = ">=3,<4" name = "nornir-napalm" version = "0.4.0" description = "NAPALM's plugins for nornir" +category = "main" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -2556,13 +2720,14 @@ nornir = ">=3,<4" [[package]] name = "nornir-nautobot" -version = "3.1.2" +version = "3.2.0" description = "Nornir Nautobot" +category = "main" optional = false -python-versions = ">=3.8,<4.0" +python-versions = "<4.0,>=3.8" files = [ - {file = "nornir_nautobot-3.1.2-py3-none-any.whl", hash = "sha256:b93597c507371674ca83b36509d043419ca264572d01151746c52c37bf1421f6"}, - {file = 
"nornir_nautobot-3.1.2.tar.gz", hash = "sha256:69fcf4da83b53b876267f18451b9bb7071218519bfdb8969fb1744825a029901"}, + {file = "nornir_nautobot-3.2.0-py3-none-any.whl", hash = "sha256:ed0ac258eebd2e3072f1d7a0c1f964965e7c9bf8c744290bb5ea04d5800b0ef4"}, + {file = "nornir_nautobot-3.2.0.tar.gz", hash = "sha256:087ad3f6b37112e2a4ff4be64a3b5bfbddfae22057c182e57fae7084850d3d63"}, ] [package.dependencies] @@ -2583,6 +2748,7 @@ mikrotik-driver = ["routeros-api (>=0.17.0,<0.18.0)"] name = "nornir-netmiko" version = "1.0.1" description = "Netmiko's plugins for Nornir" +category = "main" optional = false python-versions = ">=3.8,<4.0" files = [ @@ -2597,6 +2763,7 @@ netmiko = ">=4.0.0,<5.0.0" name = "nornir-utils" version = "0.2.0" description = "Collection of plugins and functions for nornir that don't require external dependencies" +category = "main" optional = false python-versions = ">=3.6.2,<4.0.0" files = [ @@ -2612,6 +2779,7 @@ nornir = ">=3,<4" name = "ntc-templates" version = "4.4.0" description = "TextFSM Templates for Network Devices, and Python wrapper for TextFSM's CliTable." 
+category = "main" optional = false python-versions = ">=3.8,<4.0" files = [ @@ -2626,6 +2794,7 @@ textfsm = ">=1.1.0,<2.0.0" name = "oauthlib" version = "3.2.2" description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -2642,6 +2811,7 @@ signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] name = "ordered-set" version = "4.1.0" description = "An OrderedSet is a custom MutableSet that remembers its order, so that every" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2656,6 +2826,7 @@ dev = ["black", "mypy", "pytest"] name = "packaging" version = "23.2" description = "Core utilities for Python packages" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2667,6 +2838,7 @@ files = [ name = "paramiko" version = "3.4.0" description = "SSH2 protocol library" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -2688,6 +2860,7 @@ invoke = ["invoke (>=2.0)"] name = "parso" version = "0.8.3" description = "A Python Parser" +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2703,6 +2876,7 @@ testing = ["docopt", "pytest (<6.0.0)"] name = "pathspec" version = "0.12.1" description = "Utility library for gitignore style pattern matching of file paths." 
+category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2714,6 +2888,7 @@ files = [ name = "pbr" version = "6.0.0" description = "Python Build Reasonableness" +category = "dev" optional = false python-versions = ">=2.6" files = [ @@ -2721,10 +2896,23 @@ files = [ {file = "pbr-6.0.0.tar.gz", hash = "sha256:d1377122a5a00e2f940ee482999518efe16d745d423a670c27773dfbc3c9a7d9"}, ] +[[package]] +name = "pep8" +version = "1.7.1" +description = "Python style guide checker" +category = "dev" +optional = false +python-versions = "*" +files = [ + {file = "pep8-1.7.1-py2.py3-none-any.whl", hash = "sha256:b22cfae5db09833bb9bd7c8463b53e1a9c9b39f12e304a8d0bba729c501827ee"}, + {file = "pep8-1.7.1.tar.gz", hash = "sha256:fe249b52e20498e59e0b5c5256aa52ee99fc295b26ec9eaa85776ffdb9fe6374"}, +] + [[package]] name = "pexpect" version = "4.9.0" description = "Pexpect allows easy control of interactive console applications." +category = "dev" optional = false python-versions = "*" files = [ @@ -2739,6 +2927,7 @@ ptyprocess = ">=0.5" name = "pickleshare" version = "0.7.5" description = "Tiny 'shelve'-like database with concurrency support" +category = "dev" optional = false python-versions = "*" files = [ @@ -2750,6 +2939,7 @@ files = [ name = "pillow" version = "10.2.0" description = "Python Imaging Library (Fork)" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2835,6 +3025,7 @@ xmp = ["defusedxml"] name = "pkgutil-resolve-name" version = "1.3.10" description = "Resolve a name to an object." +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -2846,6 +3037,7 @@ files = [ name = "platformdirs" version = "4.2.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
+category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2861,6 +3053,7 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest- name = "prometheus-client" version = "0.17.1" description = "Python client for the Prometheus monitoring system." +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -2875,6 +3068,7 @@ twisted = ["twisted"] name = "promise" version = "2.3" description = "Promises/A+ implementation for Python" +category = "main" optional = false python-versions = "*" files = [ @@ -2891,6 +3085,7 @@ test = ["coveralls", "futures", "mock", "pytest (>=2.7.3)", "pytest-benchmark", name = "prompt-toolkit" version = "3.0.43" description = "Library for building powerful interactive command lines in Python" +category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -2905,6 +3100,7 @@ wcwidth = "*" name = "psycopg2-binary" version = "2.9.9" description = "psycopg2 - Python-PostgreSQL Database Adapter" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2986,6 +3182,7 @@ files = [ name = "ptyprocess" version = "0.7.0" description = "Run a subprocess in a pseudo terminal" +category = "dev" optional = false python-versions = "*" files = [ @@ -2997,6 +3194,7 @@ files = [ name = "pure-eval" version = "0.2.2" description = "Safely evaluate AST nodes without side effects" +category = "dev" optional = false python-versions = "*" files = [ @@ -3009,19 +3207,21 @@ tests = ["pytest"] [[package]] name = "pycodestyle" -version = "2.9.1" +version = "2.11.1" description = "Python style guide checker" +category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "pycodestyle-2.9.1-py2.py3-none-any.whl", hash = "sha256:d1735fc58b418fd7c5f658d28d943854f8a849b01a5d0a1e6f3f3fdd0166804b"}, - {file = "pycodestyle-2.9.1.tar.gz", hash = "sha256:2c9607871d58c76354b697b42f5d57e1ada7d261c261efac224b664affdc5785"}, + {file = 
"pycodestyle-2.11.1-py2.py3-none-any.whl", hash = "sha256:44fe31000b2d866f2e41841b18528a505fbd7fef9017b04eff4e2648a0fadc67"}, + {file = "pycodestyle-2.11.1.tar.gz", hash = "sha256:41ba0e7afc9752dfb53ced5489e89f8186be00e599e712660695b7a75ff2663f"}, ] [[package]] name = "pycparser" version = "2.22" description = "C parser in Python" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -3031,47 +3231,48 @@ files = [ [[package]] name = "pydantic" -version = "1.10.14" +version = "1.10.15" description = "Data validation and settings management using python type hints" +category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "pydantic-1.10.14-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7f4fcec873f90537c382840f330b90f4715eebc2bc9925f04cb92de593eae054"}, - {file = "pydantic-1.10.14-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e3a76f571970fcd3c43ad982daf936ae39b3e90b8a2e96c04113a369869dc87"}, - {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82d886bd3c3fbeaa963692ef6b643159ccb4b4cefaf7ff1617720cbead04fd1d"}, - {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:798a3d05ee3b71967844a1164fd5bdb8c22c6d674f26274e78b9f29d81770c4e"}, - {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:23d47a4b57a38e8652bcab15a658fdb13c785b9ce217cc3a729504ab4e1d6bc9"}, - {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f9f674b5c3bebc2eba401de64f29948ae1e646ba2735f884d1594c5f675d6f2a"}, - {file = "pydantic-1.10.14-cp310-cp310-win_amd64.whl", hash = "sha256:24a7679fab2e0eeedb5a8924fc4a694b3bcaac7d305aeeac72dd7d4e05ecbebf"}, - {file = "pydantic-1.10.14-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9d578ac4bf7fdf10ce14caba6f734c178379bd35c486c6deb6f49006e1ba78a7"}, - {file = "pydantic-1.10.14-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:fa7790e94c60f809c95602a26d906eba01a0abee9cc24150e4ce2189352deb1b"}, - {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aad4e10efa5474ed1a611b6d7f0d130f4aafadceb73c11d9e72823e8f508e663"}, - {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1245f4f61f467cb3dfeced2b119afef3db386aec3d24a22a1de08c65038b255f"}, - {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:21efacc678a11114c765eb52ec0db62edffa89e9a562a94cbf8fa10b5db5c046"}, - {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:412ab4a3f6dbd2bf18aefa9f79c7cca23744846b31f1d6555c2ee2b05a2e14ca"}, - {file = "pydantic-1.10.14-cp311-cp311-win_amd64.whl", hash = "sha256:e897c9f35281f7889873a3e6d6b69aa1447ceb024e8495a5f0d02ecd17742a7f"}, - {file = "pydantic-1.10.14-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d604be0f0b44d473e54fdcb12302495fe0467c56509a2f80483476f3ba92b33c"}, - {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a42c7d17706911199798d4c464b352e640cab4351efe69c2267823d619a937e5"}, - {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:596f12a1085e38dbda5cbb874d0973303e34227b400b6414782bf205cc14940c"}, - {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bfb113860e9288d0886e3b9e49d9cf4a9d48b441f52ded7d96db7819028514cc"}, - {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bc3ed06ab13660b565eed80887fcfbc0070f0aa0691fbb351657041d3e874efe"}, - {file = "pydantic-1.10.14-cp37-cp37m-win_amd64.whl", hash = "sha256:ad8c2bc677ae5f6dbd3cf92f2c7dc613507eafe8f71719727cbc0a7dec9a8c01"}, - {file = "pydantic-1.10.14-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c37c28449752bb1f47975d22ef2882d70513c546f8f37201e0fec3a97b816eee"}, - {file = 
"pydantic-1.10.14-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:49a46a0994dd551ec051986806122767cf144b9702e31d47f6d493c336462597"}, - {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53e3819bd20a42470d6dd0fe7fc1c121c92247bca104ce608e609b59bc7a77ee"}, - {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0fbb503bbbbab0c588ed3cd21975a1d0d4163b87e360fec17a792f7d8c4ff29f"}, - {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:336709883c15c050b9c55a63d6c7ff09be883dbc17805d2b063395dd9d9d0022"}, - {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4ae57b4d8e3312d486e2498d42aed3ece7b51848336964e43abbf9671584e67f"}, - {file = "pydantic-1.10.14-cp38-cp38-win_amd64.whl", hash = "sha256:dba49d52500c35cfec0b28aa8b3ea5c37c9df183ffc7210b10ff2a415c125c4a"}, - {file = "pydantic-1.10.14-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c66609e138c31cba607d8e2a7b6a5dc38979a06c900815495b2d90ce6ded35b4"}, - {file = "pydantic-1.10.14-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d986e115e0b39604b9eee3507987368ff8148222da213cd38c359f6f57b3b347"}, - {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:646b2b12df4295b4c3148850c85bff29ef6d0d9621a8d091e98094871a62e5c7"}, - {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282613a5969c47c83a8710cc8bfd1e70c9223feb76566f74683af889faadc0ea"}, - {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:466669501d08ad8eb3c4fecd991c5e793c4e0bbd62299d05111d4f827cded64f"}, - {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:13e86a19dca96373dcf3190fcb8797d40a6f12f154a244a8d1e8e03b8f280593"}, - {file = "pydantic-1.10.14-cp39-cp39-win_amd64.whl", hash = 
"sha256:08b6ec0917c30861e3fe71a93be1648a2aa4f62f866142ba21670b24444d7fd8"}, - {file = "pydantic-1.10.14-py3-none-any.whl", hash = "sha256:8ee853cd12ac2ddbf0ecbac1c289f95882b2d4482258048079d13be700aa114c"}, - {file = "pydantic-1.10.14.tar.gz", hash = "sha256:46f17b832fe27de7850896f3afee50ea682220dd218f7e9c88d436788419dca6"}, + {file = "pydantic-1.10.15-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:22ed12ee588b1df028a2aa5d66f07bf8f8b4c8579c2e96d5a9c1f96b77f3bb55"}, + {file = "pydantic-1.10.15-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:75279d3cac98186b6ebc2597b06bcbc7244744f6b0b44a23e4ef01e5683cc0d2"}, + {file = "pydantic-1.10.15-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50f1666a9940d3d68683c9d96e39640f709d7a72ff8702987dab1761036206bb"}, + {file = "pydantic-1.10.15-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82790d4753ee5d00739d6cb5cf56bceb186d9d6ce134aca3ba7befb1eedbc2c8"}, + {file = "pydantic-1.10.15-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:d207d5b87f6cbefbdb1198154292faee8017d7495a54ae58db06762004500d00"}, + {file = "pydantic-1.10.15-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e49db944fad339b2ccb80128ffd3f8af076f9f287197a480bf1e4ca053a866f0"}, + {file = "pydantic-1.10.15-cp310-cp310-win_amd64.whl", hash = "sha256:d3b5c4cbd0c9cb61bbbb19ce335e1f8ab87a811f6d589ed52b0254cf585d709c"}, + {file = "pydantic-1.10.15-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c3d5731a120752248844676bf92f25a12f6e45425e63ce22e0849297a093b5b0"}, + {file = "pydantic-1.10.15-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c365ad9c394f9eeffcb30a82f4246c0006417f03a7c0f8315d6211f25f7cb654"}, + {file = "pydantic-1.10.15-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3287e1614393119c67bd4404f46e33ae3be3ed4cd10360b48d0a4459f420c6a3"}, + {file = 
"pydantic-1.10.15-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be51dd2c8596b25fe43c0a4a59c2bee4f18d88efb8031188f9e7ddc6b469cf44"}, + {file = "pydantic-1.10.15-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6a51a1dd4aa7b3f1317f65493a182d3cff708385327c1c82c81e4a9d6d65b2e4"}, + {file = "pydantic-1.10.15-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4e316e54b5775d1eb59187f9290aeb38acf620e10f7fd2f776d97bb788199e53"}, + {file = "pydantic-1.10.15-cp311-cp311-win_amd64.whl", hash = "sha256:0d142fa1b8f2f0ae11ddd5e3e317dcac060b951d605fda26ca9b234b92214986"}, + {file = "pydantic-1.10.15-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7ea210336b891f5ea334f8fc9f8f862b87acd5d4a0cbc9e3e208e7aa1775dabf"}, + {file = "pydantic-1.10.15-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3453685ccd7140715e05f2193d64030101eaad26076fad4e246c1cc97e1bb30d"}, + {file = "pydantic-1.10.15-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bea1f03b8d4e8e86702c918ccfd5d947ac268f0f0cc6ed71782e4b09353b26f"}, + {file = "pydantic-1.10.15-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:005655cabc29081de8243126e036f2065bd7ea5b9dff95fde6d2c642d39755de"}, + {file = "pydantic-1.10.15-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:af9850d98fc21e5bc24ea9e35dd80a29faf6462c608728a110c0a30b595e58b7"}, + {file = "pydantic-1.10.15-cp37-cp37m-win_amd64.whl", hash = "sha256:d31ee5b14a82c9afe2bd26aaa405293d4237d0591527d9129ce36e58f19f95c1"}, + {file = "pydantic-1.10.15-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5e09c19df304b8123938dc3c53d3d3be6ec74b9d7d0d80f4f4b5432ae16c2022"}, + {file = "pydantic-1.10.15-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7ac9237cd62947db00a0d16acf2f3e00d1ae9d3bd602b9c415f93e7a9fc10528"}, + {file = "pydantic-1.10.15-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:584f2d4c98ffec420e02305cf675857bae03c9d617fcfdc34946b1160213a948"}, + {file = "pydantic-1.10.15-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bbc6989fad0c030bd70a0b6f626f98a862224bc2b1e36bfc531ea2facc0a340c"}, + {file = "pydantic-1.10.15-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d573082c6ef99336f2cb5b667b781d2f776d4af311574fb53d908517ba523c22"}, + {file = "pydantic-1.10.15-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6bd7030c9abc80134087d8b6e7aa957e43d35714daa116aced57269a445b8f7b"}, + {file = "pydantic-1.10.15-cp38-cp38-win_amd64.whl", hash = "sha256:3350f527bb04138f8aff932dc828f154847fbdc7a1a44c240fbfff1b57f49a12"}, + {file = "pydantic-1.10.15-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:51d405b42f1b86703555797270e4970a9f9bd7953f3990142e69d1037f9d9e51"}, + {file = "pydantic-1.10.15-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a980a77c52723b0dc56640ced396b73a024d4b74f02bcb2d21dbbac1debbe9d0"}, + {file = "pydantic-1.10.15-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67f1a1fb467d3f49e1708a3f632b11c69fccb4e748a325d5a491ddc7b5d22383"}, + {file = "pydantic-1.10.15-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:676ed48f2c5bbad835f1a8ed8a6d44c1cd5a21121116d2ac40bd1cd3619746ed"}, + {file = "pydantic-1.10.15-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:92229f73400b80c13afcd050687f4d7e88de9234d74b27e6728aa689abcf58cc"}, + {file = "pydantic-1.10.15-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2746189100c646682eff0bce95efa7d2e203420d8e1c613dc0c6b4c1d9c1fde4"}, + {file = "pydantic-1.10.15-cp39-cp39-win_amd64.whl", hash = "sha256:394f08750bd8eaad714718812e7fab615f873b3cdd0b9d84e76e51ef3b50b6b7"}, + {file = "pydantic-1.10.15-py3-none-any.whl", hash = "sha256:28e552a060ba2740d0d2aabe35162652c1459a0b9069fe0db7f4ee0e18e74d58"}, + {file = "pydantic-1.10.15.tar.gz", hash = 
"sha256:ca832e124eda231a60a041da4f013e3ff24949d94a01154b137fc2f2a43c3ffb"}, ] [package.dependencies] @@ -3085,6 +3286,7 @@ email = ["email-validator (>=1.0.3)"] name = "pyeapi" version = "1.0.2" description = "Python Client for eAPI" +category = "main" optional = false python-versions = "*" files = [ @@ -3100,19 +3302,21 @@ test = ["coverage", "mock"] [[package]] name = "pyflakes" -version = "2.5.0" +version = "3.2.0" description = "passive checker of Python programs" +category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "pyflakes-2.5.0-py2.py3-none-any.whl", hash = "sha256:4579f67d887f804e67edb544428f264b7b24f435b263c4614f384135cea553d2"}, - {file = "pyflakes-2.5.0.tar.gz", hash = "sha256:491feb020dca48ccc562a8c0cbe8df07ee13078df59813b83959cbdada312ea3"}, + {file = "pyflakes-3.2.0-py2.py3-none-any.whl", hash = "sha256:84b5be138a2dfbb40689ca07e2152deb896a65c3a3e24c251c5c62489568074a"}, + {file = "pyflakes-3.2.0.tar.gz", hash = "sha256:1c61603ff154621fb2a9172037d84dca3500def8c8b630657d1701f026f8af3f"}, ] [[package]] name = "pygments" version = "2.17.2" description = "Pygments is a syntax highlighting package written in Python." 
+category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3128,6 +3332,7 @@ windows-terminal = ["colorama (>=0.4.6)"] name = "pyjwt" version = "2.8.0" description = "JSON Web Token implementation in Python" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3145,6 +3350,7 @@ tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] name = "pylint" version = "3.1.0" description = "python code static checker" +category = "dev" optional = false python-versions = ">=3.8.0" files = [ @@ -3174,6 +3380,7 @@ testutils = ["gitpython (>3)"] name = "pylint-django" version = "2.5.5" description = "A Pylint plugin to help Pylint understand the Django web framework" +category = "dev" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -3192,6 +3399,7 @@ with-django = ["Django (>=2.2)"] name = "pylint-nautobot" version = "0.3.0" description = "Custom Pylint Rules for Nautobot" +category = "dev" optional = false python-versions = ">=3.8,<3.12" files = [ @@ -3209,6 +3417,7 @@ toml = ">=0.10.2" name = "pylint-plugin-utils" version = "0.8.2" description = "Utilities and helpers for writing Pylint plugins" +category = "dev" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -3223,6 +3432,7 @@ pylint = ">=1.7" name = "pymdown-extensions" version = "10.7.1" description = "Extension pack for Python Markdown." 
+category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -3241,6 +3451,7 @@ extra = ["pygments (>=2.12)"] name = "pynacl" version = "1.5.0" description = "Python binding to the Networking and Cryptography (NaCl) library" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -3267,6 +3478,7 @@ tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"] name = "pynautobot" version = "2.1.1" description = "Nautobot API client library" +category = "main" optional = false python-versions = "<4.0,>=3.8" files = [ @@ -3283,6 +3495,7 @@ urllib3 = ">=1.21.1,<1.27" name = "pyparsing" version = "3.1.2" description = "pyparsing module - Classes and methods to define and execute parsing grammars" +category = "main" optional = false python-versions = ">=3.6.8" files = [ @@ -3297,6 +3510,7 @@ diagrams = ["jinja2", "railroad-diagrams"] name = "pyserial" version = "3.5" description = "Python Serial Port Extension" +category = "main" optional = false python-versions = "*" files = [ @@ -3311,6 +3525,7 @@ cp2110 = ["hidapi"] name = "python-crontab" version = "3.0.0" description = "Python Crontab API" +category = "main" optional = false python-versions = "*" files = [ @@ -3329,6 +3544,7 @@ cron-schedule = ["croniter"] name = "python-dateutil" version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" +category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ @@ -3343,6 +3559,7 @@ six = ">=1.5" name = "python-slugify" version = "8.0.4" description = "A Python slugify application that also handles Unicode" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3360,6 +3577,7 @@ unidecode = ["Unidecode (>=1.1.1)"] name = "python3-openid" version = "3.2.0" description = "OpenID support for modern servers and consumers." 
+category = "main" optional = false python-versions = "*" files = [ @@ -3378,6 +3596,7 @@ postgresql = ["psycopg2"] name = "pytz" version = "2024.1" description = "World timezone definitions, modern and historical" +category = "main" optional = false python-versions = "*" files = [ @@ -3389,6 +3608,7 @@ files = [ name = "pyuwsgi" version = "2.0.23.post0" description = "The uWSGI server" +category = "main" optional = false python-versions = "*" files = [ @@ -3441,6 +3661,7 @@ files = [ name = "pyyaml" version = "6.0.1" description = "YAML parser and emitter for Python" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -3462,7 +3683,6 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, @@ -3501,6 +3721,7 @@ files = [ name = "pyyaml-env-tag" version = "0.1" description = "A custom YAML tag for referencing environment variables in YAML files. 
" +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -3515,6 +3736,7 @@ pyyaml = "*" name = "redis" version = "5.0.3" description = "Python client for Redis database and key-value store" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3533,6 +3755,7 @@ ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==20.0.1)", "requests (>=2.26.0)" name = "referencing" version = "0.34.0" description = "JSON Referencing + Python" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -3548,6 +3771,7 @@ rpds-py = ">=0.7.0" name = "regex" version = "2023.12.25" description = "Alternative regular expression module, to replace re." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3650,6 +3874,7 @@ files = [ name = "requests" version = "2.31.0" description = "Python HTTP for Humans." +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3671,6 +3896,7 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] name = "requests-oauthlib" version = "2.0.0" description = "OAuthlib authentication support for Requests." 
+category = "main" optional = false python-versions = ">=3.4" files = [ @@ -3689,6 +3915,7 @@ rsa = ["oauthlib[signedtoken] (>=3.0.0)"] name = "rich" version = "13.7.1" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" +category = "dev" optional = false python-versions = ">=3.7.0" files = [ @@ -3708,6 +3935,7 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"] name = "rpds-py" version = "0.18.0" description = "Python bindings to Rust's persistent data structures (rpds)" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -3816,6 +4044,7 @@ files = [ name = "ruamel-yaml" version = "0.18.6" description = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3834,6 +4063,7 @@ jinja2 = ["ruamel.yaml.jinja2 (>=0.2)"] name = "ruamel-yaml-clib" version = "0.2.8" description = "C version of reader, parser and emitter for ruamel.yaml derived from libyaml" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -3891,34 +4121,36 @@ files = [ [[package]] name = "ruff" -version = "0.3.4" +version = "0.3.5" description = "An extremely fast Python linter and code formatter, written in Rust." 
+category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.3.4-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:60c870a7d46efcbc8385d27ec07fe534ac32f3b251e4fc44b3cbfd9e09609ef4"}, - {file = "ruff-0.3.4-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:6fc14fa742e1d8f24910e1fff0bd5e26d395b0e0e04cc1b15c7c5e5fe5b4af91"}, - {file = "ruff-0.3.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3ee7880f653cc03749a3bfea720cf2a192e4f884925b0cf7eecce82f0ce5854"}, - {file = "ruff-0.3.4-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cf133dd744f2470b347f602452a88e70dadfbe0fcfb5fd46e093d55da65f82f7"}, - {file = "ruff-0.3.4-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3f3860057590e810c7ffea75669bdc6927bfd91e29b4baa9258fd48b540a4365"}, - {file = "ruff-0.3.4-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:986f2377f7cf12efac1f515fc1a5b753c000ed1e0a6de96747cdf2da20a1b369"}, - {file = "ruff-0.3.4-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4fd98e85869603e65f554fdc5cddf0712e352fe6e61d29d5a6fe087ec82b76c"}, - {file = "ruff-0.3.4-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:64abeed785dad51801b423fa51840b1764b35d6c461ea8caef9cf9e5e5ab34d9"}, - {file = "ruff-0.3.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df52972138318bc7546d92348a1ee58449bc3f9eaf0db278906eb511889c4b50"}, - {file = "ruff-0.3.4-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:98e98300056445ba2cc27d0b325fd044dc17fcc38e4e4d2c7711585bd0a958ed"}, - {file = "ruff-0.3.4-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:519cf6a0ebed244dce1dc8aecd3dc99add7a2ee15bb68cf19588bb5bf58e0488"}, - {file = "ruff-0.3.4-py3-none-musllinux_1_2_i686.whl", hash = "sha256:bb0acfb921030d00070539c038cd24bb1df73a2981e9f55942514af8b17be94e"}, - {file = 
"ruff-0.3.4-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:cf187a7e7098233d0d0c71175375c5162f880126c4c716fa28a8ac418dcf3378"}, - {file = "ruff-0.3.4-py3-none-win32.whl", hash = "sha256:af27ac187c0a331e8ef91d84bf1c3c6a5dea97e912a7560ac0cef25c526a4102"}, - {file = "ruff-0.3.4-py3-none-win_amd64.whl", hash = "sha256:de0d5069b165e5a32b3c6ffbb81c350b1e3d3483347196ffdf86dc0ef9e37dd6"}, - {file = "ruff-0.3.4-py3-none-win_arm64.whl", hash = "sha256:6810563cc08ad0096b57c717bd78aeac888a1bfd38654d9113cb3dc4d3f74232"}, - {file = "ruff-0.3.4.tar.gz", hash = "sha256:f0f4484c6541a99862b693e13a151435a279b271cff20e37101116a21e2a1ad1"}, + {file = "ruff-0.3.5-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:aef5bd3b89e657007e1be6b16553c8813b221ff6d92c7526b7e0227450981eac"}, + {file = "ruff-0.3.5-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:89b1e92b3bd9fca249153a97d23f29bed3992cff414b222fcd361d763fc53f12"}, + {file = "ruff-0.3.5-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e55771559c89272c3ebab23326dc23e7f813e492052391fe7950c1a5a139d89"}, + {file = "ruff-0.3.5-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:dabc62195bf54b8a7876add6e789caae0268f34582333cda340497c886111c39"}, + {file = "ruff-0.3.5-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3a05f3793ba25f194f395578579c546ca5d83e0195f992edc32e5907d142bfa3"}, + {file = "ruff-0.3.5-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:dfd3504e881082959b4160ab02f7a205f0fadc0a9619cc481982b6837b2fd4c0"}, + {file = "ruff-0.3.5-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:87258e0d4b04046cf1d6cc1c56fadbf7a880cc3de1f7294938e923234cf9e498"}, + {file = "ruff-0.3.5-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:712e71283fc7d9f95047ed5f793bc019b0b0a29849b14664a60fd66c23b96da1"}, + {file = 
"ruff-0.3.5-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a532a90b4a18d3f722c124c513ffb5e5eaff0cc4f6d3aa4bda38e691b8600c9f"}, + {file = "ruff-0.3.5-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:122de171a147c76ada00f76df533b54676f6e321e61bd8656ae54be326c10296"}, + {file = "ruff-0.3.5-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:d80a6b18a6c3b6ed25b71b05eba183f37d9bc8b16ace9e3d700997f00b74660b"}, + {file = "ruff-0.3.5-py3-none-musllinux_1_2_i686.whl", hash = "sha256:a7b6e63194c68bca8e71f81de30cfa6f58ff70393cf45aab4c20f158227d5936"}, + {file = "ruff-0.3.5-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:a759d33a20c72f2dfa54dae6e85e1225b8e302e8ac655773aff22e542a300985"}, + {file = "ruff-0.3.5-py3-none-win32.whl", hash = "sha256:9d8605aa990045517c911726d21293ef4baa64f87265896e491a05461cae078d"}, + {file = "ruff-0.3.5-py3-none-win_amd64.whl", hash = "sha256:dc56bb16a63c1303bd47563c60482a1512721053d93231cf7e9e1c6954395a0e"}, + {file = "ruff-0.3.5-py3-none-win_arm64.whl", hash = "sha256:faeeae9905446b975dcf6d4499dc93439b131f1443ee264055c5716dd947af55"}, + {file = "ruff-0.3.5.tar.gz", hash = "sha256:a067daaeb1dc2baf9b82a32dae67d154d95212080c80435eb052d95da647763d"}, ] [[package]] name = "rx" version = "1.6.3" description = "Reactive Extensions (Rx) for Python" +category = "main" optional = false python-versions = "*" files = [ @@ -3929,6 +4161,7 @@ files = [ name = "scp" version = "0.14.5" description = "scp module for paramiko" +category = "main" optional = false python-versions = "*" files = [ @@ -3943,6 +4176,7 @@ paramiko = "*" name = "setuptools" version = "69.2.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -3959,6 +4193,7 @@ testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jar name = "singledispatch" version = "4.1.0" description = "Backport functools.singledispatch to older 
Pythons." +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -3974,6 +4209,7 @@ testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", name = "six" version = "1.16.0" description = "Python 2 and 3 compatibility utilities" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -3985,6 +4221,7 @@ files = [ name = "smmap" version = "5.0.1" description = "A pure Python implementation of a sliding window memory map manager" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3996,6 +4233,7 @@ files = [ name = "sniffio" version = "1.3.1" description = "Sniff out which async library your code is running under" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4007,6 +4245,7 @@ files = [ name = "social-auth-app-django" version = "5.2.0" description = "Python Social Authentication, Django integration." +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4022,6 +4261,7 @@ social-auth-core = ">=4.4.1" name = "social-auth-core" version = "4.5.3" description = "Python social authentication made simple." +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -4048,6 +4288,7 @@ saml = ["python3-saml (>=1.5.0)"] name = "sqlparse" version = "0.4.4" description = "A non-validating SQL parser." 
+category = "main" optional = false python-versions = ">=3.5" files = [ @@ -4064,6 +4305,7 @@ test = ["pytest", "pytest-cov"] name = "stack-data" version = "0.6.3" description = "Extract data from python stack frames and tracebacks for informative displays" +category = "dev" optional = false python-versions = "*" files = [ @@ -4083,6 +4325,7 @@ tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"] name = "stevedore" version = "5.2.0" description = "Manage dynamic plugins for Python applications" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -4097,6 +4340,7 @@ pbr = ">=2.0.0,<2.1.0 || >2.1.0" name = "structlog" version = "22.3.0" description = "Structured Logging for Python" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4114,6 +4358,7 @@ typing = ["mypy", "rich", "twisted"] name = "svgwrite" version = "1.4.3" description = "A Python library to create SVG drawings." +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -4125,6 +4370,7 @@ files = [ name = "text-unidecode" version = "1.3" description = "The most basic Text::Unidecode port" +category = "main" optional = false python-versions = "*" files = [ @@ -4136,6 +4382,7 @@ files = [ name = "textfsm" version = "1.1.3" description = "Python module for parsing semi-structured text into python tables." 
+category = "main" optional = false python-versions = "*" files = [ @@ -4151,6 +4398,7 @@ six = "*" name = "toml" version = "0.10.2" description = "Python Library for Tom's Obvious, Minimal Language" +category = "dev" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -4162,6 +4410,7 @@ files = [ name = "tomli" version = "2.0.1" description = "A lil' TOML parser" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4173,6 +4422,7 @@ files = [ name = "tomlkit" version = "0.12.4" description = "Style preserving TOML library" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -4184,6 +4434,7 @@ files = [ name = "towncrier" version = "23.6.0" description = "Building newsfiles for your project." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -4206,6 +4457,7 @@ dev = ["furo", "packaging", "sphinx (>=5)", "twisted"] name = "traitlets" version = "5.14.2" description = "Traitlets Python configuration system" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -4221,6 +4473,7 @@ test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0, name = "transitions" version = "0.9.0" description = "A lightweight, object-oriented Python state machine implementation with many extensions." 
+category = "main" optional = false python-versions = "*" files = [ @@ -4239,6 +4492,7 @@ test = ["pytest"] name = "ttp" version = "0.9.5" description = "Template Text Parser" +category = "main" optional = false python-versions = ">=2.7,<4.0" files = [ @@ -4254,6 +4508,7 @@ full = ["cerberus (>=1.3.0,<1.4.0)", "deepdiff (>=5.8.0,<5.9.0)", "jinja2 (>=3.0 name = "ttp-templates" version = "0.3.6" description = "Template Text Parser Templates collections" +category = "main" optional = false python-versions = ">=3.6,<4.0" files = [ @@ -4271,6 +4526,7 @@ docs = ["mkdocs (==1.2.4)", "mkdocs-material (==7.2.2)", "mkdocs-material-extens name = "typing-extensions" version = "4.10.0" description = "Backported and Experimental Type Hints for Python 3.8+" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -4282,6 +4538,7 @@ files = [ name = "tzdata" version = "2024.1" description = "Provider of IANA time zone data" +category = "main" optional = false python-versions = ">=2" files = [ @@ -4293,6 +4550,7 @@ files = [ name = "uritemplate" version = "4.1.1" description = "Implementation of RFC 6570 URI Templates" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -4304,6 +4562,7 @@ files = [ name = "urllib3" version = "1.26.18" description = "HTTP library with thread-safe connection pooling, file post, and more." +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ @@ -4320,6 +4579,7 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] name = "vine" version = "5.1.0" description = "Python promises." 
+category = "main" optional = false python-versions = ">=3.6" files = [ @@ -4331,6 +4591,7 @@ files = [ name = "watchdog" version = "4.0.0" description = "Filesystem events monitoring" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -4372,6 +4633,7 @@ watchmedo = ["PyYAML (>=3.10)"] name = "wcwidth" version = "0.2.13" description = "Measures the displayed width of unicode strings in a terminal" +category = "main" optional = false python-versions = "*" files = [ @@ -4383,6 +4645,7 @@ files = [ name = "wheel" version = "0.43.0" description = "A built-package format for Python" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -4397,6 +4660,7 @@ test = ["pytest (>=6.0.0)", "setuptools (>=65)"] name = "yamllint" version = "1.35.1" description = "A linter for YAML files." +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -4415,6 +4679,7 @@ dev = ["doc8", "flake8", "flake8-import-order", "rstcheck[sphinx]", "sphinx"] name = "yamlordereddictloader" version = "0.4.2" description = "YAML loader and dumper for PyYAML allowing to keep keys order." 
+category = "main" optional = false python-versions = "*" files = [ @@ -4429,6 +4694,7 @@ pyyaml = "*" name = "zipp" version = "3.18.1" description = "Backport of pathlib-compatible object wrapper for zip files" +category = "main" optional = false python-versions = ">=3.8" files = [ diff --git a/tasks.py b/tasks.py index 6bd18e00..88dfd468 100644 --- a/tasks.py +++ b/tasks.py @@ -48,7 +48,7 @@ def is_truthy(arg): namespace.configure( { "nautobot_device_onboarding": { - "nautobot_ver": "2.1.1", + "nautobot_ver": "2.1.9", "project_name": "nautobot-device-onboarding", "python_ver": "3.11", "local": False, From c4e601093ec8ab079f2850607adf5d64f4ea085e Mon Sep 17 00:00:00 2001 From: Jeff Kala Date: Fri, 5 Apr 2024 14:50:44 -0500 Subject: [PATCH 195/225] more refactors and few cleanups --- .../{nornir_plays => }/jinja_filters.py | 0 nautobot_device_onboarding/nornir_plays/command_getter.py | 2 +- nautobot_device_onboarding/nornir_plays/formatter.py | 7 ++++--- .../nornir_plays/inventory_creator.py | 4 ---- 4 files changed, 5 insertions(+), 8 deletions(-) rename nautobot_device_onboarding/{nornir_plays => }/jinja_filters.py (100%) diff --git a/nautobot_device_onboarding/nornir_plays/jinja_filters.py b/nautobot_device_onboarding/jinja_filters.py similarity index 100% rename from nautobot_device_onboarding/nornir_plays/jinja_filters.py rename to nautobot_device_onboarding/jinja_filters.py diff --git a/nautobot_device_onboarding/nornir_plays/command_getter.py b/nautobot_device_onboarding/nornir_plays/command_getter.py index 18cd7bad..9e73c5d3 100755 --- a/nautobot_device_onboarding/nornir_plays/command_getter.py +++ b/nautobot_device_onboarding/nornir_plays/command_getter.py @@ -217,5 +217,5 @@ def command_getter_ni(job_result, log_level, kwargs): logger.info("Error: %s", err) return err compiled_results = format_results(compiled_results) - + print(f"compiled_results: {compiled_results}") return compiled_results diff --git 
a/nautobot_device_onboarding/nornir_plays/formatter.py b/nautobot_device_onboarding/nornir_plays/formatter.py index 0ff8c816..324782a1 100755 --- a/nautobot_device_onboarding/nornir_plays/formatter.py +++ b/nautobot_device_onboarding/nornir_plays/formatter.py @@ -145,8 +145,10 @@ def format_ios_results(device): mac_list = ensure_list(macs) description_list = ensure_list(descriptions) link_status_list = ensure_list(link_statuses) - vrf_list = ensure_list(vrfs) - + if vrfs is None: + vrf_list = [] + else: + vrf_list = ensure_list(vrfs) interface_dict = {} for item in mtu_list: interface_dict.setdefault(item["interface"], {})["mtu"] = item["mtu"] @@ -169,7 +171,6 @@ def format_ios_results(device): ) for interface in interface_dict.values(): interface.setdefault("vrf", {}) - for vrf in vrf_list: for interface in vrf["interfaces"]: canonical_name = canonical_interface_name(interface) diff --git a/nautobot_device_onboarding/nornir_plays/inventory_creator.py b/nautobot_device_onboarding/nornir_plays/inventory_creator.py index 36c4e2c8..059b3782 100755 --- a/nautobot_device_onboarding/nornir_plays/inventory_creator.py +++ b/nautobot_device_onboarding/nornir_plays/inventory_creator.py @@ -3,8 +3,6 @@ from netmiko import SSHDetect from nornir.core.inventory import ConnectionOptions, Host -from nautobot_device_onboarding.nornir_plays.transform import add_platform_parsing_info - def guess_netmiko_device_type(hostname, username, password, port): """Guess the device type of host, based on Netmiko.""" @@ -32,7 +30,6 @@ def guess_netmiko_device_type(hostname, username, password, port): def _set_inventory(host_ip, platform, port, username, password): """Construct Nornir Inventory.""" - parsing_info = add_platform_parsing_info(host_ip) inv = {} if platform: platform = platform.network_driver @@ -55,7 +52,6 @@ def _set_inventory(host_ip, platform, port, username, password): platform=platform, ) }, - data={"platform_parsing_info": parsing_info[platform]}, ) inv.update({host_ip: host}) 
From 3de016829db46767e83e9ce4b1ecaf41cfd72162 Mon Sep 17 00:00:00 2001 From: David Cates Date: Fri, 5 Apr 2024 14:46:30 -0700 Subject: [PATCH 196/225] add vrf to ssot sync, fix vlan bug, update logging --- nautobot_device_onboarding/constants.py | 1 - nautobot_device_onboarding/datasources.py | 2 +- .../adapters/network_importer_adapters.py | 122 ++++++- .../diffsync/adapters/onboarding_adapters.py | 4 +- .../models/network_importer_models.py | 140 +++++++-- nautobot_device_onboarding/jinja_filters.py | 1 + nautobot_device_onboarding/jobs.py | 6 +- .../nornir_plays/command_getter.py | 5 +- .../nornir_plays/empty_inventory.py | 1 + .../nornir_plays/formatter.py | 3 +- .../nornir_plays/transform.py | 1 + nautobot_device_onboarding/utils/helper.py | 5 +- poetry.lock | 297 +++--------------- 13 files changed, 279 insertions(+), 309 deletions(-) diff --git a/nautobot_device_onboarding/constants.py b/nautobot_device_onboarding/constants.py index 440db241..d278c9c8 100644 --- a/nautobot_device_onboarding/constants.py +++ b/nautobot_device_onboarding/constants.py @@ -2,7 +2,6 @@ from django.conf import settings - PLUGIN_CFG = settings.PLUGINS_CONFIG["nautobot_device_onboarding"] # DATA_DIR = os.path.abspath(os.path.join(os.path.dirname(os.path.dirname(__file__)), "command_mappers")) diff --git a/nautobot_device_onboarding/datasources.py b/nautobot_device_onboarding/datasources.py index 313fb62d..d39b80f1 100755 --- a/nautobot_device_onboarding/datasources.py +++ b/nautobot_device_onboarding/datasources.py @@ -1,7 +1,7 @@ """Datasources to override command_mapper yaml files.""" -from nautobot.extras.choices import LogLevelChoices from nautobot.apps.datasources import DatasourceContent +from nautobot.extras.choices import LogLevelChoices def refresh_git_command_mappers(repository_record, job_result, delete=False): # pylint: disable=unused-argument diff --git a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py 
b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py index 4c71cd61..ab605a78 100644 --- a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py @@ -4,7 +4,7 @@ from diffsync.enum import DiffSyncModelFlags from django.core.exceptions import ValidationError from nautobot.dcim.models import Interface -from nautobot.ipam.models import VLAN, IPAddress +from nautobot.ipam.models import VLAN, VRF, IPAddress from nautobot_ssot.contrib import NautobotAdapter from netaddr import EUI, mac_unix_expanded @@ -35,20 +35,24 @@ class NetworkImporterNautobotAdapter(FilteredNautobotAdapter): ip_address = network_importer_models.NetworkImporterIPAddress ipaddress_to_interface = network_importer_models.NetworkImporterIPAddressToInterface vlan = network_importer_models.NetworkImporterVLAN + vrf = network_importer_models.NetworkImporterVRF tagged_vlans_to_interface = network_importer_models.NetworkImporterTaggedVlansToInterface untagged_vlan_to_interface = network_importer_models.NetworkImporterUnTaggedVlanToInterface lag_to_interface = network_importer_models.NetworkImporterLagToInterface + vrf_to_interface = network_importer_models.NetworkImporterVrfToInterface primary_ips = None top_level = [ "ip_address", "vlan", + "vrf", "device", "ipaddress_to_interface", "untagged_vlan_to_interface", "tagged_vlans_to_interface", "lag_to_interface", + "vrf_to_interface", ] def _cache_primary_ips(self, device_queryset): @@ -116,7 +120,7 @@ def load_vlans(self): """ Load Vlans into the Diffsync store. - Only Vlans that were returned by the CommandGetter job should be loaded. + Only Vlans that were returned by the CommandGetter job should be synced. """ for vlan in VLAN.objects.all(): network_vlan = self.vlan( @@ -134,7 +138,8 @@ def load_vlans(self): pass def load_tagged_vlans_to_interface(self): - """Load a model representing tagged vlan assignments to the Diffsync store. 
+ """ + Load Tagged VLAN interface assignments into the Diffsync store. Only Vlan assignments that were returned by the CommandGetter job should be loaded. """ @@ -158,9 +163,10 @@ def load_tagged_vlans_to_interface(self): self.job.logger.debug(f"Tagged Vlan to interface: {network_tagged_vlans_to_interface} loaded.") def load_untagged_vlan_to_interface(self): - """Load a model representing untagged vlan assignments to the Diffsync store. + """ + Load UnTagged VLAN interface assignments into the Diffsync store. - Only Vlan assignments that were returned by the CommandGetter job should be loaded. + Only UnTagged Vlan assignments that were returned by the CommandGetter job should be synced. """ for interface in Interface.objects.filter(device__in=self.job.devices_to_load): untagged_vlan = {} @@ -181,9 +187,9 @@ def load_untagged_vlan_to_interface(self): def load_lag_to_interface(self): """ - Load a model representing lag assignments to the Diffsync store. + Load Lag interface assignments into the Diffsync store. - Only Lag assignments that were returned by the CommandGetter job should be loaded. + Only Lag assignments that were returned by the CommandGetter job should be synced. """ for interface in Interface.objects.filter(device__in=self.job.devices_to_load): network_lag_to_interface = self.lag_to_interface( @@ -197,19 +203,68 @@ def load_lag_to_interface(self): if self.job.debug: self.job.logger.debug(f"Lag to interface {network_lag_to_interface} loaded.") + def load_vrfs(self): + """ + Load Vrfs into the Diffsync store. + + Only Vrfs that were returned by the CommandGetter job should be synced. 
+ """ + for vrf in VRF.objects.all(): + network_vrf = self.vrf( + diffsync=self, + name=vrf.name, + rd=vrf.rd if vrf.rd else "", + namespace__name=vrf.namespace.name, + ) + try: + network_vrf.model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST + self.add(network_vrf) + if self.job.debug: + self.job.logger.debug(f"Vrf {network_vrf} loaded.") + except diffsync.exceptions.ObjectAlreadyExists: + pass + + def load_vrf_to_interface(self): + """ + Load Vrf to interface assignments into the Diffsync store. + + Only Vrf assignments that were returned by the CommandGetter job should be synced. + """ + for interface in Interface.objects.filter(device__in=self.job.devices_to_load): + vrf = {} + if interface.vrf: + vrf["name"] = interface.vrf.name + vrf["rd"] = str(interface.vrf.rd) + + network_vrf_to_interface = self.vrf_to_interface( + diffsync=self, + device__name=interface.device.name, + name=interface.name, + vrf=vrf, + ) + network_vrf_to_interface.model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST + self.add(network_vrf_to_interface) + if self.job.debug: + self.job.logger.debug(f"Vrf to interface: {network_vrf_to_interface} loaded.") + def load(self): """Generic implementation of the load function.""" if not hasattr(self, "top_level") or not self.top_level: raise ValueError("'top_level' needs to be set on the class.") self._cache_primary_ips(device_queryset=self.job.devices_to_load) - + self.job.logger.warning("Called 1") for model_name in self.top_level: if model_name == "ip_address": self.load_ip_addresses() elif model_name == "vlan": if self.job.sync_vlans: + self.job.logger.warning("Called 2") self.load_vlans() + elif model_name == "vrf": + if self.job.sync_vrfs: + self.job.logger.warning("Called 3") + self.load_vrfs() elif model_name == "tagged_vlans_to_interface": if self.job.sync_vlans: self.load_tagged_vlans_to_interface() @@ -218,6 +273,9 @@ def load(self): self.load_untagged_vlan_to_interface() elif model_name == "lag_to_interface": self.load_lag_to_interface() 
+ elif model_name == "vrf_to_interface": + if self.job.sync_vrfs: + self.load_vrf_to_interface() else: diffsync_model = self._get_diffsync_class(model_name) self._load_objects(diffsync_model) @@ -232,9 +290,9 @@ def sync_complete(self, source, diff, *args, **kwargs): This method only runs if data was changed. """ + if self.job.debug: + self.job.logger.debug("Sync Complete method called, checking for missing primary ip addresses...") for device in self.job.devices_to_load.all(): # refresh queryset after sync is complete - if self.job.debug: - self.job.logger.debug("Sync Complete method called, checking for missing primary ip addresses...") if not device.primary_ip: ip_address = "" try: @@ -287,18 +345,22 @@ def __init__(self, *args, job, sync=None, **kwargs): ip_address = network_importer_models.NetworkImporterIPAddress ipaddress_to_interface = network_importer_models.NetworkImporterIPAddressToInterface vlan = network_importer_models.NetworkImporterVLAN + vrf = network_importer_models.NetworkImporterVRF tagged_vlans_to_interface = network_importer_models.NetworkImporterTaggedVlansToInterface untagged_vlan_to_interface = network_importer_models.NetworkImporterUnTaggedVlanToInterface lag_to_interface = network_importer_models.NetworkImporterLagToInterface + vrf_to_interface = network_importer_models.NetworkImporterVrfToInterface top_level = [ "ip_address", "vlan", + "vrf", "device", "ipaddress_to_interface", "untagged_vlan_to_interface", "tagged_vlans_to_interface", "lag_to_interface", + "vrf_to_interface", ] def _handle_failed_devices(self, device_data): @@ -330,7 +392,7 @@ def execute_command_getter(self): self.job.job_result, self.job.logger.getEffectiveLevel(), self.job.job_result.task_kwargs ) if self.job.debug: - self.job.logger.debug(f"Command Getter Job Result: {result}") + self.job.logger.debug(f"Command Getter Result: {result}") # verify data returned is a dict data_type_check = diffsync_utils.check_data_type(result) if self.job.debug: @@ -455,6 +517,25 @@ 
def load_vlans(self): except diffsync.exceptions.ObjectAlreadyExists: pass + def load_vrfs(self): + """Load vrfs into the Diffsync store.""" + for hostname, device_data in self.job.command_getter_result.items(): # pylint: disable=too-many-nested-blocks + for interface in device_data["interfaces"]: + for _, interface_data in interface.items(): + if interface_data["vrf"]: + network_vrf = self.vrf( + diffsync=self, + name=interface_data["vrf"]["name"], + rd=interface_data["vrf"]["rd"] if interface_data["vrf"]["rd"] else "", + namespace__name=self.job.namespace.name, + ) + try: + self.add(network_vrf) + if self.job.debug: + self.job.logger.debug(f"Vrf {network_vrf} loaded.") + except diffsync.exceptions.ObjectAlreadyExists: + pass + def load_ip_address_to_interfaces(self): """Load ip address interface assignments into the Diffsync store.""" for hostname, device_data in self.job.command_getter_result.items(): # pylint: disable=too-many-nested-blocks @@ -524,15 +605,34 @@ def load_lag_to_interface(self): if self.job.debug: self.job.logger.debug(f"Lag to interface {network_lag_to_interface} loaded.") + def load_vrf_to_interface(self): + """Load Vrf to interface assignments into the Diffsync store.""" + for hostname, device_data in self.job.command_getter_result.items(): + for interface in device_data["interfaces"]: + for interface_name, interface_data in interface.items(): + network_vrf_to_interface = self.vrf_to_interface( + diffsync=self, + device__name=hostname, + name=interface_name, + vrf=interface_data["vrf"], + ) + self.add(network_vrf_to_interface) + if self.job.debug: + self.job.logger.debug(f"Vrf to interface {network_vrf_to_interface} loaded.") + def load(self): """Load network data.""" self.execute_command_getter() self.load_ip_addresses() if self.job.sync_vlans: self.load_vlans() + if self.job.sync_vrfs: + self.load_vrfs() self.load_devices() self.load_ip_address_to_interfaces() if self.job.sync_vlans: self.load_tagged_vlans_to_interface()
self.load_untagged_vlan_to_interface() self.load_lag_to_interface() + if self.job.sync_vrfs: + self.load_vrf_to_interface() diff --git a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py index 2930f530..4130fcbf 100644 --- a/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/onboarding_adapters.py @@ -206,7 +206,7 @@ def execute_command_getter(self): self.job.job_result, self.job.logger.getEffectiveLevel(), self.job.job_result.task_kwargs ) if self.job.debug: - self.job.logger.debug(f"Command Getter Job Result: {result}") + self.job.logger.debug(f"Command Getter Result: {result}") data_type_check = diffsync_utils.check_data_type(result) if self.job.debug: self.job.logger.debug(f"CommandGetter data type check resut: {data_type_check}") @@ -347,7 +347,7 @@ def load_devices(self): ) except KeyError as err: self.job.logger.error( - f"{ip_address}: Unable to load Device due to a missing key in returned data, {err.args}" + f"{ip_address}: Unable to load Device due to a missing key in returned data, {err.args}, {err}" ) if ip_address not in self.failed_ip_addresses: self.failed_ip_addresses.append(ip_address) diff --git a/nautobot_device_onboarding/diffsync/models/network_importer_models.py b/nautobot_device_onboarding/diffsync/models/network_importer_models.py index 0609c400..8149732c 100644 --- a/nautobot_device_onboarding/diffsync/models/network_importer_models.py +++ b/nautobot_device_onboarding/diffsync/models/network_importer_models.py @@ -8,7 +8,7 @@ from nautobot.dcim.choices import InterfaceTypeChoices from nautobot.dcim.models import Device, Interface, Location from nautobot.extras.models import Status -from nautobot.ipam.models import VLAN, IPAddress, IPAddressToInterface +from nautobot.ipam.models import VLAN, VRF, IPAddress, IPAddressToInterface from nautobot_ssot.contrib import NautobotModel from 
nautobot_device_onboarding.utils import diffsync_utils @@ -152,8 +152,6 @@ def update(self, attrs): ip_address = IPAddress.objects.get(host=self.host, parent__namespace=self.diffsync.job.namespace) except ObjectDoesNotExist as err: self.job.logger.error(f"{self} failed to update, {err}") - if self.diffsync.job.debug: - self.diffsync.job.logger.debug(f"Updating {self} with attrs: {attrs}") if attrs.get("mask_length"): ip_address.mask_length = attrs["mask_length"] if attrs.get("status__name"): @@ -171,8 +169,8 @@ def update(self, attrs): class NetworkImporterIPAddressToInterface(FilteredNautobotModel): """Shared data model representing an IPAddressToInterface.""" - _model = IPAddressToInterface _modelname = "ipaddress_to_interface" + _model = IPAddressToInterface _identifiers = ("interface__device__name", "interface__name", "ip_address__host") interface__device__name: str @@ -251,7 +249,7 @@ def _get_and_assign_tagged_vlans(cls, diffsync, attrs, interface): interface.tagged_vlans.add(nautobot_vlan) except ObjectDoesNotExist: diffsync.job.logger.error( - f"Failed to assign tagged vlan to {interface}, unable to locate a vlan " + f"Failed to assign tagged vlan to {interface.device}:{interface}, unable to locate a vlan " f"with attributes [name: {network_vlan['name']}, vid: {network_vlan['id']} " f"location: {interface.device.location}]" ) @@ -327,7 +325,7 @@ def _get_and_assign_untagged_vlan(cls, diffsync, attrs, interface): interface.untagged_vlan = vlan except ObjectDoesNotExist: diffsync.job.logger.error( - f"Failed to assign untagged vlan to {interface}, unable to locate a vlan with " + f"Failed to assign untagged vlan to {interface.device}:{interface}, unable to locate a vlan with " f"attributes [name: {attrs['untagged_vlan']['name']}, vid: {attrs['untagged_vlan']['id']} " f"location: {interface.device.location}]" ) @@ -345,16 +343,14 @@ def create(cls, diffsync, ids, attrs): f"attributes: [device__name: {ids['device__name']} name: {ids['name']}] was not found." 
) raise diffsync_exceptions.ObjectNotCreated - if attrs.get("untagged_vlan"): - cls._get_and_assign_untagged_vlan(diffsync, attrs, interface) - if interface: - try: - interface.validated_save() - except ValidationError as err: - diffsync.job.logger.error( - f"Failed to assign untagged vlan {attrs['untagged_vlan']} to {interface} on {interface.device}, {err}" - ) - raise diffsync_exceptions.ObjectNotCreated + cls._get_and_assign_untagged_vlan(diffsync, attrs, interface) + try: + interface.validated_save() + except ValidationError as err: + diffsync.job.logger.error( + f"Failed to assign untagged vlan {attrs['untagged_vlan']} to {interface} on {interface.device}, {err}" + ) + raise diffsync_exceptions.ObjectNotCreated return super().create(diffsync, ids, attrs) def update(self, attrs): @@ -368,16 +364,14 @@ def update(self, attrs): f"attributes: [{self.get_identifiers}] was not found." ) raise diffsync_exceptions.ObjectNotUpdated - if attrs.get("untagged_vlan"): - self._get_and_assign_untagged_vlan(self.diffsync, attrs, interface) - if interface: - try: - interface.validated_save() - except ValidationError as err: - self.diffsync.job.logger.error( - f"Failed to assign untagged vlans {attrs['untagged_vlan']} to {interface} on {interface.device}, {err}" - ) - raise diffsync_exceptions.ObjectNotUpdated + self._get_and_assign_untagged_vlan(self.diffsync, attrs, interface) + try: + interface.validated_save() + except ValidationError as err: + self.diffsync.job.logger.error( + f"Failed to assign untagged vlans {attrs['untagged_vlan']} to {interface} on {interface.device}, {err}" + ) + raise diffsync_exceptions.ObjectNotUpdated return super().update(attrs) @@ -415,7 +409,7 @@ def create(cls, diffsync, ids, attrs): interface.validated_save() except ObjectDoesNotExist: diffsync.job.logger.error( - f"Failed to assign lag to {interface}, unable to locate a lag interface " + f"Failed to assign lag to {interface.device}:{interface}, unable to locate a lag interface " f"with 
attributes [name: {attrs['lag__interface__name']}, device: {interface.device.name} " f"type: {InterfaceTypeChoices.TYPE_LAG}]" ) @@ -460,4 +454,96 @@ def update(self, attrs): return super().update(attrs) +class NetworkImporterVRF(FilteredNautobotModel): + """Shared data model representing a VRF.""" + + _modelname = "vrf" + _model = VRF + _identifiers = ("rd", "name", "namespace__name") + + rd: str + name: str + namespace__name: str + + +class NetworkImporterVrfToInterface(DiffSyncModel): + """Shared data model representing a VrfToInterface.""" + + _modelname = "vrf_to_interface" + _identifiers = ("device__name", "name") + _attributes = ("vrf",) + + device__name: str + name: str + + vrf: Optional[dict] + + @classmethod + def _get_and_assign_vrf(cls, diffsync, attrs, interface): + """Assign a vrf to an interface.""" + try: + vrf = VRF.objects.get( + name=attrs["vrf"]["name"], + rd=attrs["vrf"]["rd"], + namespace=diffsync.job.namespace, + ) + except ObjectDoesNotExist: + diffsync.job.logger.error( + f"Failed to assign vrf to {interface.device}:{interface}, unable to locate a vrf with attributes " + f"[name: {attrs['vrf']['name']}, rd: {attrs['vrf']['rd']} " + f"namespace: {diffsync.job.namespace}]" + ) + raise diffsync_exceptions.ObjectNotCreated + try: + vrf.devices.add(interface.device) + vrf.validated_save() + except Exception as err: + diffsync.job.logger.error(f"Failed to assign device: {interface.device} to vrf: {vrf}, {err}") + raise diffsync_exceptions.ObjectNotCreated + interface.vrf = vrf + + @classmethod + def create(cls, diffsync, ids, attrs): + """Assign a vrf to an interface.""" + if attrs.get("vrf"): + try: + interface = Interface.objects.get(device__name=ids["device__name"], name=ids["name"]) + except ObjectDoesNotExist: + diffsync.job.logger.error( + f"Failed to assign vrf {attrs['vrf']}. An interface with attributes: " + f"[device__name: {ids['device__name']} name: {ids['name']}] was not found."
+ ) + raise diffsync_exceptions.ObjectNotCreated + cls._get_and_assign_vrf(diffsync, attrs, interface) + try: + interface.validated_save() + except ValidationError as err: + diffsync.job.logger.error( + f"Failed to assign vrf {attrs['vrf']} to {interface} on {interface.device}, {err}" + ) + raise diffsync_exceptions.ObjectNotCreated + return super().create(diffsync, ids, attrs) + + def update(self, attrs): + """Update the vrf on an interface.""" + if attrs.get("vrf"): + try: + interface = Interface.objects.get(**self.get_identifiers()) + except ObjectDoesNotExist: + self.diffsync.job.logger.error( + f"Failed to assign vrf {attrs['vrf']['name']}. " + f"An interface with attributes: [{self.get_identifiers}] was not found." + ) + raise diffsync_exceptions.ObjectNotUpdated + self._get_and_assign_vrf(self.diffsync, attrs, interface) + try: + interface.validated_save() + except ValidationError as err: + self.diffsync.job.logger.error( + f"Failed to assign vrf {attrs['vrf']} to {interface} on {interface.device}, {err}" + ) + raise diffsync_exceptions.ObjectNotUpdated + return super().update(attrs) + + # TODO: Cable Model diff --git a/nautobot_device_onboarding/jinja_filters.py b/nautobot_device_onboarding/jinja_filters.py index 1b32a2dd..a9270526 100755 --- a/nautobot_device_onboarding/jinja_filters.py +++ b/nautobot_device_onboarding/jinja_filters.py @@ -3,6 +3,7 @@ # https://docs.nautobot.com/projects/core/en/stable/development/apps/api/platform-features/jinja2-filters/ from django_jinja import library + from nautobot_device_onboarding.constants import INTERFACE_TYPE_MAP_STATIC diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py index 7cb08d4e..3f047763 100755 --- a/nautobot_device_onboarding/jobs.py +++ b/nautobot_device_onboarding/jobs.py @@ -25,9 +25,9 @@ OnboardingNetworkAdapter, ) from nautobot_device_onboarding.exceptions import OnboardException -from nautobot_device_onboarding.utils.helper import onboarding_task_fqdn_to_ip from 
nautobot_device_onboarding.netdev_keeper import NetdevKeeper from nautobot_device_onboarding.nornir_plays.command_getter import command_getter_do, command_getter_ni +from nautobot_device_onboarding.utils.helper import onboarding_task_fqdn_to_ip PLUGIN_SETTINGS = settings.PLUGINS_CONFIG["nautobot_device_onboarding"] @@ -533,6 +533,7 @@ class Meta: debug = BooleanVar(description="Enable for more verbose logging.") sync_vlans = BooleanVar(default=False, description="Sync VLANs and interface VLAN assignments.") + sync_vrfs = BooleanVar(default=False, description="Sync VRFs and interface VRF assignments.") namespace = ObjectVar( model=Namespace, required=True, description="The namespace for all IP addresses created or updated in the sync." ) @@ -599,6 +600,7 @@ def run( devices, device_role, sync_vlans, + sync_vrfs, *args, **kwargs, ): @@ -614,6 +616,7 @@ def run( self.devices = devices self.device_role = device_role self.sync_vlans = sync_vlans + self.sync_vrfs = sync_vrfs # Filter devices based on form input device_filter = {} @@ -636,6 +639,7 @@ def run( "devices": self.filtered_devices, "device_role": device_role, "sync_vlans": sync_vlans, + "sync_vrfs": sync_vrfs, } super().run(dryrun, memory_profiling, *args, **kwargs) diff --git a/nautobot_device_onboarding/nornir_plays/command_getter.py b/nautobot_device_onboarding/nornir_plays/command_getter.py index 9e73c5d3..bb9eb727 100755 --- a/nautobot_device_onboarding/nornir_plays/command_getter.py +++ b/nautobot_device_onboarding/nornir_plays/command_getter.py @@ -2,6 +2,7 @@ # pylint: disable=relative-beyond-top-level from typing import Dict + from django.conf import settings from nautobot.dcim.models import Platform from nautobot.extras.choices import SecretsGroupAccessTypeChoices, SecretsGroupSecretTypeChoices @@ -16,10 +17,10 @@ from nautobot_device_onboarding.constants import NETMIKO_TO_NAPALM_STATIC from nautobot_device_onboarding.nornir_plays.empty_inventory import EmptyInventory +from 
nautobot_device_onboarding.nornir_plays.formatter import format_results +from nautobot_device_onboarding.nornir_plays.inventory_creator import _set_inventory from nautobot_device_onboarding.nornir_plays.logger import NornirLogger from nautobot_device_onboarding.nornir_plays.processor import ProcessorDO -from nautobot_device_onboarding.nornir_plays.inventory_creator import _set_inventory -from nautobot_device_onboarding.nornir_plays.formatter import format_results from nautobot_device_onboarding.nornir_plays.transform import add_platform_parsing_info InventoryPluginRegister.register("nautobot-inventory", NautobotORMInventory) diff --git a/nautobot_device_onboarding/nornir_plays/empty_inventory.py b/nautobot_device_onboarding/nornir_plays/empty_inventory.py index dd01e7dd..4da4e15e 100755 --- a/nautobot_device_onboarding/nornir_plays/empty_inventory.py +++ b/nautobot_device_onboarding/nornir_plays/empty_inventory.py @@ -1,6 +1,7 @@ """Empty Nornir Inventory Plugin.""" from nornir.core.inventory import Defaults, Groups, Hosts, Inventory + from nautobot_device_onboarding.nornir_plays.transform import add_platform_parsing_info diff --git a/nautobot_device_onboarding/nornir_plays/formatter.py b/nautobot_device_onboarding/nornir_plays/formatter.py index 324782a1..ebfc24d3 100755 --- a/nautobot_device_onboarding/nornir_plays/formatter.py +++ b/nautobot_device_onboarding/nornir_plays/formatter.py @@ -1,12 +1,13 @@ """Formatter.""" import json + from django.template import engines from django.utils.module_loading import import_string from jdiff import extract_data_from_json from jinja2.sandbox import SandboxedEnvironment -from netutils.interface import canonical_interface_name from nautobot.dcim.models import Device +from netutils.interface import canonical_interface_name from nautobot_device_onboarding.constants import INTERFACE_TYPE_MAP_STATIC diff --git a/nautobot_device_onboarding/nornir_plays/transform.py b/nautobot_device_onboarding/nornir_plays/transform.py index 
3bbfef8d..ad1d9ef1 100755 --- a/nautobot_device_onboarding/nornir_plays/transform.py +++ b/nautobot_device_onboarding/nornir_plays/transform.py @@ -1,6 +1,7 @@ """Adds command mapper, platform parsing info.""" import os + import yaml from nautobot.extras.models import GitRepository diff --git a/nautobot_device_onboarding/utils/helper.py b/nautobot_device_onboarding/utils/helper.py index b8aae847..f4c28d77 100644 --- a/nautobot_device_onboarding/utils/helper.py +++ b/nautobot_device_onboarding/utils/helper.py @@ -1,13 +1,14 @@ """General helper functions for the app.""" import socket + import netaddr -from netaddr.core import AddrFormatError from nautobot.dcim.filters import DeviceFilterSet from nautobot.dcim.models import Device +from netaddr.core import AddrFormatError from nornir_nautobot.exceptions import NornirNautobotException -from nautobot_device_onboarding.exceptions import OnboardException +from nautobot_device_onboarding.exceptions import OnboardException FIELDS_PK = { "location", diff --git a/poetry.lock b/poetry.lock index 4b957c38..baca4e6d 100755 --- a/poetry.lock +++ b/poetry.lock @@ -1,10 +1,9 @@ -# This file is automatically @generated by Poetry 1.4.0 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. [[package]] name = "amqp" version = "5.2.0" description = "Low-level AMQP client for Python (fork of amqplib)." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -19,7 +18,6 @@ vine = ">=5.0.0,<6.0.0" name = "aniso8601" version = "7.0.0" description = "A library for parsing ISO 8601 strings." 
-category = "main" optional = false python-versions = "*" files = [ @@ -31,7 +29,6 @@ files = [ name = "anyio" version = "4.3.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -54,7 +51,6 @@ trio = ["trio (>=0.23)"] name = "appnope" version = "0.1.4" description = "Disable App Nap on macOS >= 10.9" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -66,7 +62,6 @@ files = [ name = "asgiref" version = "3.8.1" description = "ASGI specs, helper code, and adapters" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -84,7 +79,6 @@ tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"] name = "astroid" version = "3.1.0" description = "An abstract syntax tree for Python with inference support." -category = "dev" optional = false python-versions = ">=3.8.0" files = [ @@ -99,7 +93,6 @@ typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""} name = "asttokens" version = "2.4.1" description = "Annotate AST trees with source code positions" -category = "dev" optional = false python-versions = "*" files = [ @@ -118,7 +111,6 @@ test = ["astroid (>=1,<2)", "astroid (>=2,<4)", "pytest"] name = "astunparse" version = "1.6.3" description = "An AST unparser for Python" -category = "dev" optional = false python-versions = "*" files = [ @@ -134,7 +126,6 @@ wheel = ">=0.23.0,<1.0" name = "async-timeout" version = "4.0.3" description = "Timeout context manager for asyncio programs" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -146,7 +137,6 @@ files = [ name = "attrs" version = "23.2.0" description = "Classes Without Boilerplate" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -164,25 +154,23 @@ tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "p [[package]] name = "autopep8" -version = "2.1.0" +version = 
"2.0.0" description = "A tool that automatically formats Python code to conform to the PEP 8 style guide" -category = "main" optional = false -python-versions = ">=3.8" +python-versions = "*" files = [ - {file = "autopep8-2.1.0-py2.py3-none-any.whl", hash = "sha256:2bb76888c5edbcafe6aabab3c47ba534f5a2c2d245c2eddced4a30c4b4946357"}, - {file = "autopep8-2.1.0.tar.gz", hash = "sha256:1fa8964e4618929488f4ec36795c7ff12924a68b8bf01366c094fc52f770b6e7"}, + {file = "autopep8-2.0.0-py2.py3-none-any.whl", hash = "sha256:ad924b42c2e27a1ac58e432166cc4588f5b80747de02d0d35b1ecbd3e7d57207"}, + {file = "autopep8-2.0.0.tar.gz", hash = "sha256:8b1659c7f003e693199f52caffdc06585bb0716900bbc6a7442fd931d658c077"}, ] [package.dependencies] -pycodestyle = ">=2.11.0" -tomli = {version = "*", markers = "python_version < \"3.11\""} +pycodestyle = ">=2.9.1" +tomli = "*" [[package]] name = "backcall" version = "0.2.0" description = "Specifications for callback functions passed in to an API" -category = "dev" optional = false python-versions = "*" files = [ @@ -194,7 +182,6 @@ files = [ name = "backports-zoneinfo" version = "0.2.1" description = "Backport of the standard library zoneinfo module" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -226,7 +213,6 @@ tzdata = ["tzdata"] name = "bandit" version = "1.7.8" description = "Security oriented static analyser for python code." 
-category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -251,7 +237,6 @@ yaml = ["PyYAML"] name = "bcrypt" version = "4.1.2" description = "Modern password hashing for your software and your servers" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -292,7 +277,6 @@ typecheck = ["mypy"] name = "billiard" version = "4.2.0" description = "Python multiprocessing fork with improvements and bugfixes" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -304,7 +288,6 @@ files = [ name = "black" version = "24.3.0" description = "The uncompromising code formatter." -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -351,7 +334,6 @@ uvloop = ["uvloop (>=0.15.2)"] name = "celery" version = "5.3.6" description = "Distributed Task Queue." -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -408,7 +390,6 @@ zstd = ["zstandard (==0.22.0)"] name = "certifi" version = "2024.2.2" description = "Python package for providing Mozilla's CA Bundle." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -420,7 +401,6 @@ files = [ name = "cffi" version = "1.16.0" description = "Foreign Function Interface for Python calling C code." -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -485,7 +465,6 @@ pycparser = "*" name = "charset-normalizer" version = "3.3.2" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
-category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -585,7 +564,6 @@ files = [ name = "click" version = "8.1.7" description = "Composable command line interface toolkit" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -600,7 +578,6 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""} name = "click-default-group" version = "1.2.4" description = "click_default_group" -category = "dev" optional = false python-versions = ">=2.7" files = [ @@ -618,7 +595,6 @@ test = ["pytest"] name = "click-didyoumean" version = "0.3.1" description = "Enables git-like *did-you-mean* feature in click" -category = "main" optional = false python-versions = ">=3.6.2" files = [ @@ -633,7 +609,6 @@ click = ">=7" name = "click-plugins" version = "1.1.1" description = "An extension module for click to enable registering CLI commands via setuptools entry-points." -category = "main" optional = false python-versions = "*" files = [ @@ -651,7 +626,6 @@ dev = ["coveralls", "pytest (>=3.6)", "pytest-cov", "wheel"] name = "click-repl" version = "0.3.0" description = "REPL plugin for Click" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -670,7 +644,6 @@ testing = ["pytest (>=7.2.1)", "pytest-cov (>=4.0.0)", "tox (>=4.4.3)"] name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ @@ -682,7 +655,6 @@ files = [ name = "coverage" version = "7.4.4" description = "Code coverage measurement for Python" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -747,7 +719,6 @@ toml = ["tomli"] name = "cron-descriptor" version = "1.4.3" description = "A Python library that converts cron expressions into human readable strings." 
-category = "main" optional = false python-versions = "*" files = [ @@ -762,7 +733,6 @@ dev = ["polib"] name = "cryptography" version = "42.0.5" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -817,7 +787,6 @@ test-randomorder = ["pytest-randomly"] name = "decorator" version = "5.1.1" description = "Decorators for Humans" -category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -829,7 +798,6 @@ files = [ name = "deepdiff" version = "6.7.1" description = "Deep Difference and Search of any Python object/data. Recreate objects by adding adding deltas to each other." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -848,7 +816,6 @@ optimize = ["orjson"] name = "defusedxml" version = "0.7.1" description = "XML bomb protection for Python stdlib modules" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -860,7 +827,6 @@ files = [ name = "diffsync" version = "1.10.0" description = "Library to easily sync/diff/update 2 different data sources" -category = "main" optional = false python-versions = ">=3.8,<4.0" files = [ @@ -882,7 +848,6 @@ redis = ["redis (>=4.3,<5.0)"] name = "dill" version = "0.3.8" description = "serialize all of Python" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -898,7 +863,6 @@ profile = ["gprof2dot (>=2022.7.29)"] name = "django" version = "3.2.25" description = "A high-level Python Web framework that encourages rapid development and clean, pragmatic design." 
-category = "main" optional = false python-versions = ">=3.6" files = [ @@ -919,7 +883,6 @@ bcrypt = ["bcrypt"] name = "django-ajax-tables" version = "1.1.1" description = "Django tag for ajax-enabled tables" -category = "main" optional = false python-versions = "*" files = [ @@ -931,7 +894,6 @@ files = [ name = "django-celery-beat" version = "2.5.0" description = "Database-backed Periodic Tasks." -category = "main" optional = false python-versions = "*" files = [ @@ -952,7 +914,6 @@ tzdata = "*" name = "django-celery-results" version = "2.4.0" description = "Celery result backends for Django." -category = "main" optional = false python-versions = "*" files = [ @@ -967,7 +928,6 @@ celery = ">=5.2.3,<6.0" name = "django-constance" version = "2.9.1" description = "Django live settings with pluggable backends, including Redis." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -986,7 +946,6 @@ redis = ["redis"] name = "django-cors-headers" version = "4.2.0" description = "django-cors-headers is a Django application for handling the server headers required for Cross-Origin Resource Sharing (CORS)." -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1001,7 +960,6 @@ Django = ">=3.2" name = "django-db-file-storage" version = "0.5.6.1" description = "Custom FILE_STORAGE for Django. Saves files in your database instead of your file system." -category = "main" optional = false python-versions = "*" files = [ @@ -1016,7 +974,6 @@ Django = "*" name = "django-debug-toolbar" version = "4.3.0" description = "A configurable set of panels that display various debug information about the current request/response." 
-category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1032,7 +989,6 @@ sqlparse = ">=0.2" name = "django-extensions" version = "3.2.3" description = "Extensions for Django" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1047,7 +1003,6 @@ Django = ">=3.2" name = "django-filter" version = "23.1" description = "Django-filter is a reusable Django application for allowing users to filter querysets dynamically." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1062,7 +1017,6 @@ Django = ">=3.2" name = "django-health-check" version = "3.17.0" description = "Run checks on services like databases, queue servers, celery processes, etc." -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1081,7 +1035,6 @@ test = ["celery", "pytest", "pytest-cov", "pytest-django", "redis"] name = "django-jinja" version = "2.10.2" description = "Jinja2 templating language integrated in Django." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1097,7 +1050,6 @@ jinja2 = ">=3" name = "django-picklefield" version = "3.2" description = "Pickled object field for Django" -category = "main" optional = false python-versions = ">=3" files = [ @@ -1115,7 +1067,6 @@ tests = ["tox"] name = "django-prometheus" version = "2.3.1" description = "Django middlewares to monitor your application with Prometheus.io." -category = "main" optional = false python-versions = "*" files = [ @@ -1130,7 +1081,6 @@ prometheus-client = ">=0.7" name = "django-redis" version = "5.3.0" description = "Full featured redis cache backend for Django." 
-category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1149,7 +1099,6 @@ hiredis = ["redis[hiredis] (>=3,!=4.0.0,!=4.0.1)"] name = "django-silk" version = "5.1.0" description = "Silky smooth profiling for the Django Framework" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1167,7 +1116,6 @@ sqlparse = "*" name = "django-tables2" version = "2.6.0" description = "Table/data-grid framework for Django" -category = "main" optional = false python-versions = "*" files = [ @@ -1185,7 +1133,6 @@ tablib = ["tablib"] name = "django-taggit" version = "4.0.0" description = "django-taggit is a reusable Django application for simple tagging." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1200,7 +1147,6 @@ Django = ">=3.2" name = "django-timezone-field" version = "5.1" description = "A Django app providing DB, form, and REST framework fields for zoneinfo and pytz timezone objects." -category = "main" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -1210,14 +1156,13 @@ files = [ [package.dependencies] "backports.zoneinfo" = {version = ">=0.2.1,<0.3.0", markers = "python_version < \"3.9\""} -Django = ">=2.2,<3.0.0 || >=3.2.0,<5.0" +Django = ">=2.2,<3.0.dev0 || >=3.2.dev0,<5.0" pytz = "*" [[package]] name = "django-tree-queries" version = "0.16.1" description = "Tree queries with explicit opt-in, without configurability" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1232,7 +1177,6 @@ tests = ["coverage"] name = "django-webserver" version = "1.2.0" description = "Django management commands for production webservers" -category = "main" optional = false python-versions = "*" files = [ @@ -1254,7 +1198,6 @@ waitress = ["waitress"] name = "djangorestframework" version = "3.14.0" description = "Web APIs for Django, made easy." 
-category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1270,7 +1213,6 @@ pytz = "*" name = "drf-react-template-framework" version = "0.0.17" description = "Django REST Framework plugin that creates form schemas for react-jsonschema-form" -category = "main" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -1285,7 +1227,6 @@ djangorestframework = ">=3.12.0,<4.0.0" name = "drf-spectacular" version = "0.26.3" description = "Sane and flexible OpenAPI 3 schema generation for Django REST framework" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1310,7 +1251,6 @@ sidecar = ["drf-spectacular-sidecar"] name = "drf-spectacular-sidecar" version = "2024.4.1" description = "Serve self-contained distribution builds of Swagger UI and Redoc with Django" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1325,7 +1265,6 @@ Django = ">=2.2" name = "emoji" version = "2.8.0" description = "Emoji for Python" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -1340,7 +1279,6 @@ dev = ["coverage", "coveralls", "pytest"] name = "exceptiongroup" version = "1.2.0" description = "Backport of PEP 654 (exception groups)" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1355,7 +1293,6 @@ test = ["pytest (>=6)"] name = "executing" version = "2.0.1" description = "Get the currently executing AST node of a frame, and other information" -category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -1368,26 +1305,24 @@ tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipyth [[package]] name = "flake8" -version = "2.3.0" -description = "the modular source code checker: pep8, pyflakes and co" -category = "dev" +version = "5.0.4" +description = "the modular source code checker: pep8 pyflakes and co" optional = false -python-versions = "*" +python-versions = ">=3.6.1" files = [ - {file = 
"flake8-2.3.0-py2.py3-none-any.whl", hash = "sha256:c99cc9716d6655d9c8bcb1e77632b8615bf0abd282d7abd9f5c2148cad7fc669"}, - {file = "flake8-2.3.0.tar.gz", hash = "sha256:5ee1a43ccd0716d6061521eec6937c983efa027793013e572712c4da55c7c83e"}, + {file = "flake8-5.0.4-py2.py3-none-any.whl", hash = "sha256:7a1cf6b73744f5806ab95e526f6f0d8c01c66d7bbe349562d22dfca20610b248"}, + {file = "flake8-5.0.4.tar.gz", hash = "sha256:6fbe320aad8d6b95cec8b8e47bc933004678dc63095be98528b7bdd2a9f510db"}, ] [package.dependencies] -mccabe = ">=0.2.1" -pep8 = ">=1.5.7" -pyflakes = ">=0.8.1" +mccabe = ">=0.7.0,<0.8.0" +pycodestyle = ">=2.9.0,<2.10.0" +pyflakes = ">=2.5.0,<2.6.0" [[package]] name = "future" version = "1.0.0" description = "Clean single-source support for Python 3 and 2" -category = "main" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -1399,7 +1334,6 @@ files = [ name = "ghp-import" version = "2.1.0" description = "Copy your docs directly to the gh-pages branch." -category = "dev" optional = false python-versions = "*" files = [ @@ -1417,7 +1351,6 @@ dev = ["flake8", "markdown", "twine", "wheel"] name = "gitdb" version = "4.0.11" description = "Git Object Database" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1432,7 +1365,6 @@ smmap = ">=3.0.1,<6" name = "gitpython" version = "3.1.43" description = "GitPython is a Python library used to interact with Git repositories" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1451,7 +1383,6 @@ test = ["coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mock", "mypy", "pre-commit", name = "gprof2dot" version = "2022.7.29" description = "Generate a dot graph from the output of several profilers." 
-category = "main" optional = false python-versions = ">=2.7" files = [ @@ -1463,7 +1394,6 @@ files = [ name = "graphene" version = "2.1.9" description = "GraphQL Framework for Python" -category = "main" optional = false python-versions = "*" files = [ @@ -1486,7 +1416,6 @@ test = ["coveralls", "fastdiff (==0.2.0)", "iso8601", "mock", "promise", "pytest name = "graphene-django" version = "2.16.0" description = "Graphene Django integration" -category = "main" optional = false python-versions = "*" files = [ @@ -1511,7 +1440,6 @@ test = ["coveralls", "django-filter (>=2)", "djangorestframework (>=3.6.3)", "mo name = "graphene-django-optimizer" version = "0.8.0" description = "Optimize database access inside graphene queries." -category = "main" optional = false python-versions = "*" files = [ @@ -1522,7 +1450,6 @@ files = [ name = "graphql-core" version = "2.3.2" description = "GraphQL implementation for Python" -category = "main" optional = false python-versions = "*" files = [ @@ -1543,7 +1470,6 @@ test = ["coveralls (==1.11.1)", "cython (==0.29.17)", "gevent (==1.5.0)", "pyann name = "graphql-relay" version = "2.0.1" description = "Relay implementation for Python" -category = "main" optional = false python-versions = "*" files = [ @@ -1560,7 +1486,6 @@ six = ">=1.12" name = "griffe" version = "0.42.1" description = "Signatures for entire Python programs. Extract the structure, the frame, the skeleton of your project, to generate API documentation or find breaking changes in your API." -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1576,7 +1501,6 @@ colorama = ">=0.4" name = "h11" version = "0.14.0" description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1588,7 +1512,6 @@ files = [ name = "httpcore" version = "1.0.5" description = "A minimal low-level HTTP client." 
-category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1603,14 +1526,13 @@ h11 = ">=0.13,<0.15" [package.extras] asyncio = ["anyio (>=4.0,<5.0)"] http2 = ["h2 (>=3,<5)"] -socks = ["socksio (>=1.0.0,<2.0.0)"] +socks = ["socksio (==1.*)"] trio = ["trio (>=0.22.0,<0.26.0)"] [[package]] name = "httpx" version = "0.27.0" description = "The next generation HTTP client." -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1621,21 +1543,20 @@ files = [ [package.dependencies] anyio = "*" certifi = "*" -httpcore = ">=1.0.0,<2.0.0" +httpcore = "==1.*" idna = "*" sniffio = "*" [package.extras] brotli = ["brotli", "brotlicffi"] -cli = ["click (>=8.0.0,<9.0.0)", "pygments (>=2.0.0,<3.0.0)", "rich (>=10,<14)"] +cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] http2 = ["h2 (>=3,<5)"] -socks = ["socksio (>=1.0.0,<2.0.0)"] +socks = ["socksio (==1.*)"] [[package]] name = "idna" version = "3.6" description = "Internationalized Domain Names in Applications (IDNA)" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -1647,7 +1568,6 @@ files = [ name = "importlib-metadata" version = "4.13.0" description = "Read metadata from Python packages" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1667,7 +1587,6 @@ testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packag name = "importlib-resources" version = "6.4.0" description = "Read resources from Python packages" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1686,7 +1605,6 @@ testing = ["jaraco.test (>=5.4)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "p name = "incremental" version = "22.10.0" description = "\"A small library that versions your Python projects.\"" -category = "dev" optional = false python-versions = "*" files = [ @@ -1702,7 +1620,6 @@ scripts = ["click (>=6.0)", "twisted (>=16.4.0)"] name = "inflection" version = "0.5.1" description = "A port of Ruby on Rails 
inflector to Python" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -1714,7 +1631,6 @@ files = [ name = "invoke" version = "2.2.0" description = "Pythonic task execution" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1726,7 +1642,6 @@ files = [ name = "ipython" version = "8.12.3" description = "IPython: Productive Interactive Computing" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1766,7 +1681,6 @@ test-extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.21)", "pa name = "isort" version = "5.13.2" description = "A Python utility / library to sort Python imports." -category = "dev" optional = false python-versions = ">=3.8.0" files = [ @@ -1781,7 +1695,6 @@ colors = ["colorama (>=0.4.6)"] name = "jdiff" version = "0.0.6" description = "A light-weight library to compare structured output from network devices show commands." -category = "main" optional = false python-versions = ">=3.8,<4.0" files = [ @@ -1797,7 +1710,6 @@ jmespath = ">=1.0.1,<2.0.0" name = "jedi" version = "0.19.1" description = "An autocompletion tool for Python that can be used for text editors." -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1817,7 +1729,6 @@ testing = ["Django", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] name = "jinja2" version = "3.1.3" description = "A very fast and expressive template engine." 
-category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1835,7 +1746,6 @@ i18n = ["Babel (>=2.7)"] name = "jmespath" version = "1.0.1" description = "JSON Matching Expressions" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1847,7 +1757,6 @@ files = [ name = "jsonschema" version = "4.18.6" description = "An implementation of JSON Schema validation for Python" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1871,7 +1780,6 @@ format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339- name = "jsonschema-specifications" version = "2023.12.1" description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1887,7 +1795,6 @@ referencing = ">=0.31.0" name = "junos-eznc" version = "2.7.0" description = "Junos 'EZ' automation for non-programmers" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1912,7 +1819,6 @@ yamlordereddictloader = "*" name = "kombu" version = "5.3.6" description = "Messaging library for Python." -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1947,7 +1853,6 @@ zookeeper = ["kazoo (>=2.8.0)"] name = "lxml" version = "5.2.1" description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -2119,7 +2024,6 @@ source = ["Cython (>=3.0.10)"] name = "markdown" version = "3.5.2" description = "Python implementation of John Gruber's Markdown." -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2138,7 +2042,6 @@ testing = ["coverage", "pyyaml"] name = "markdown-it-py" version = "3.0.0" description = "Python port of markdown-it. Markdown parsing, done right!" 
-category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2163,7 +2066,6 @@ testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] name = "markupsafe" version = "2.1.5" description = "Safely add untrusted strings to HTML/XML markup." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2233,7 +2135,6 @@ files = [ name = "matplotlib-inline" version = "0.1.6" description = "Inline Matplotlib backend for Jupyter" -category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -2248,7 +2149,6 @@ traitlets = "*" name = "mccabe" version = "0.7.0" description = "McCabe checker, plugin for flake8" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2260,7 +2160,6 @@ files = [ name = "mdurl" version = "0.1.2" description = "Markdown URL utilities" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2272,7 +2171,6 @@ files = [ name = "mergedeep" version = "1.3.4" description = "A deep merge function for 🐍." -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2284,7 +2182,6 @@ files = [ name = "mkdocs" version = "1.5.2" description = "Project documentation with Markdown." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2316,7 +2213,6 @@ min-versions = ["babel (==2.9.0)", "click (==7.0)", "colorama (==0.4)", "ghp-imp name = "mkdocs-autorefs" version = "1.0.1" description = "Automatically link across pages in MkDocs." -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2333,7 +2229,6 @@ mkdocs = ">=1.1" name = "mkdocs-material" version = "9.1.15" description = "Documentation that simply works" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2356,7 +2251,6 @@ requests = ">=2.26" name = "mkdocs-material-extensions" version = "1.3.1" description = "Extension pack for Python Markdown and MkDocs Material." 
-category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2368,7 +2262,6 @@ files = [ name = "mkdocs-version-annotations" version = "1.0.0" description = "MkDocs plugin to add custom admonitions for documenting version differences" -category = "dev" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -2380,7 +2273,6 @@ files = [ name = "mkdocstrings" version = "0.22.0" description = "Automatic documentation from sources, for MkDocs." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2407,7 +2299,6 @@ python-legacy = ["mkdocstrings-python-legacy (>=0.2.1)"] name = "mkdocstrings-python" version = "1.5.2" description = "A Python handler for mkdocstrings." -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2423,7 +2314,6 @@ mkdocstrings = ">=0.20" name = "mypy-extensions" version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -2435,7 +2325,6 @@ files = [ name = "napalm" version = "4.1.0" description = "Network Automation and Programmability Abstraction Layer with Multivendor support" -category = "main" optional = false python-versions = "*" files = [ @@ -2468,7 +2357,6 @@ typing-extensions = ">=4.3.0" name = "nautobot" version = "2.2.0" description = "Source of truth and network automation platform." -category = "main" optional = false python-versions = "<3.12,>=3.8" files = [ @@ -2533,7 +2421,6 @@ sso = ["social-auth-core[openidconnect,saml] (>=4.5.3,<4.6.0)"] name = "nautobot-plugin-nornir" version = "2.0.0" description = "Nautobot Nornir plugin." 
-category = "main" optional = false python-versions = ">=3.8,<3.12" files = [ @@ -2552,7 +2439,6 @@ nautobot = ["nautobot (>=2.0.0,<3.0.0)"] name = "nautobot-ssot" version = "2.5.0" description = "Nautobot Single Source of Truth" -category = "main" optional = false python-versions = "<3.12,>=3.8" files = [ @@ -2583,7 +2469,6 @@ servicenow = ["Jinja2 (>=2.11.3)", "PyYAML (>=6)", "ijson (>=2.5.1)", "oauthlib name = "ncclient" version = "0.6.15" description = "Python library for NETCONF clients" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -2600,7 +2485,6 @@ six = "*" name = "netaddr" version = "0.8.0" description = "A network address manipulation library for Python" -category = "main" optional = false python-versions = "*" files = [ @@ -2612,7 +2496,6 @@ files = [ name = "netmiko" version = "4.3.0" description = "Multi-vendor library to simplify legacy CLI connections to network devices" -category = "main" optional = false python-versions = ">=3.8,<4.0" files = [ @@ -2632,7 +2515,6 @@ textfsm = ">=1.1.3" name = "netutils" version = "1.8.0" description = "Common helper functions useful in network automation." -category = "main" optional = false python-versions = "<4.0,>=3.8" files = [ @@ -2647,7 +2529,6 @@ optionals = ["jsonschema (>=4.17.3,<5.0.0)", "napalm (>=4.0.0,<5.0.0)"] name = "nh3" version = "0.2.17" description = "Python bindings to the ammonia HTML sanitization library." 
-category = "main" optional = false python-versions = "*" files = [ @@ -2673,7 +2554,6 @@ files = [ name = "nornir" version = "3.4.1" description = "Pluggable multi-threaded framework with inventory management to help operate collections of devices" -category = "main" optional = false python-versions = ">=3.8,<4.0" files = [ @@ -2690,7 +2570,6 @@ mypy_extensions = ">=1.0.0,<2.0.0" name = "nornir-jinja2" version = "0.2.0" description = "Jinja2 plugins for nornir" -category = "main" optional = false python-versions = ">=3.6,<4.0" files = [ @@ -2706,7 +2585,6 @@ nornir = ">=3,<4" name = "nornir-napalm" version = "0.4.0" description = "NAPALM's plugins for nornir" -category = "main" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -2722,7 +2600,6 @@ nornir = ">=3,<4" name = "nornir-nautobot" version = "3.2.0" description = "Nornir Nautobot" -category = "main" optional = false python-versions = "<4.0,>=3.8" files = [ @@ -2748,7 +2625,6 @@ mikrotik-driver = ["routeros-api (>=0.17.0,<0.18.0)"] name = "nornir-netmiko" version = "1.0.1" description = "Netmiko's plugins for Nornir" -category = "main" optional = false python-versions = ">=3.8,<4.0" files = [ @@ -2763,7 +2639,6 @@ netmiko = ">=4.0.0,<5.0.0" name = "nornir-utils" version = "0.2.0" description = "Collection of plugins and functions for nornir that don't require external dependencies" -category = "main" optional = false python-versions = ">=3.6.2,<4.0.0" files = [ @@ -2779,7 +2654,6 @@ nornir = ">=3,<4" name = "ntc-templates" version = "4.4.0" description = "TextFSM Templates for Network Devices, and Python wrapper for TextFSM's CliTable." 
-category = "main" optional = false python-versions = ">=3.8,<4.0" files = [ @@ -2794,7 +2668,6 @@ textfsm = ">=1.1.0,<2.0.0" name = "oauthlib" version = "3.2.2" description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -2811,7 +2684,6 @@ signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] name = "ordered-set" version = "4.1.0" description = "An OrderedSet is a custom MutableSet that remembers its order, so that every" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2826,7 +2698,6 @@ dev = ["black", "mypy", "pytest"] name = "packaging" version = "23.2" description = "Core utilities for Python packages" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2838,7 +2709,6 @@ files = [ name = "paramiko" version = "3.4.0" description = "SSH2 protocol library" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -2858,25 +2728,23 @@ invoke = ["invoke (>=2.0)"] [[package]] name = "parso" -version = "0.8.3" +version = "0.8.4" description = "A Python Parser" -category = "dev" optional = false python-versions = ">=3.6" files = [ - {file = "parso-0.8.3-py2.py3-none-any.whl", hash = "sha256:c001d4636cd3aecdaf33cbb40aebb59b094be2a74c556778ef5576c175e19e75"}, - {file = "parso-0.8.3.tar.gz", hash = "sha256:8c07be290bb59f03588915921e29e8a50002acaf2cdc5fa0e0114f91709fafa0"}, + {file = "parso-0.8.4-py2.py3-none-any.whl", hash = "sha256:a418670a20291dacd2dddc80c377c5c3791378ee1e8d12bffc35420643d43f18"}, + {file = "parso-0.8.4.tar.gz", hash = "sha256:eb3a7b58240fb99099a345571deecc0f9540ea5f4dd2fe14c2a99d6b281ab92d"}, ] [package.extras] -qa = ["flake8 (==3.8.3)", "mypy (==0.782)"] -testing = ["docopt", "pytest (<6.0.0)"] +qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"] +testing = ["docopt", "pytest"] [[package]] name = "pathspec" version = "0.12.1" 
description = "Utility library for gitignore style pattern matching of file paths." -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2888,7 +2756,6 @@ files = [ name = "pbr" version = "6.0.0" description = "Python Build Reasonableness" -category = "dev" optional = false python-versions = ">=2.6" files = [ @@ -2896,23 +2763,10 @@ files = [ {file = "pbr-6.0.0.tar.gz", hash = "sha256:d1377122a5a00e2f940ee482999518efe16d745d423a670c27773dfbc3c9a7d9"}, ] -[[package]] -name = "pep8" -version = "1.7.1" -description = "Python style guide checker" -category = "dev" -optional = false -python-versions = "*" -files = [ - {file = "pep8-1.7.1-py2.py3-none-any.whl", hash = "sha256:b22cfae5db09833bb9bd7c8463b53e1a9c9b39f12e304a8d0bba729c501827ee"}, - {file = "pep8-1.7.1.tar.gz", hash = "sha256:fe249b52e20498e59e0b5c5256aa52ee99fc295b26ec9eaa85776ffdb9fe6374"}, -] - [[package]] name = "pexpect" version = "4.9.0" description = "Pexpect allows easy control of interactive console applications." -category = "dev" optional = false python-versions = "*" files = [ @@ -2927,7 +2781,6 @@ ptyprocess = ">=0.5" name = "pickleshare" version = "0.7.5" description = "Tiny 'shelve'-like database with concurrency support" -category = "dev" optional = false python-versions = "*" files = [ @@ -2939,7 +2792,6 @@ files = [ name = "pillow" version = "10.2.0" description = "Python Imaging Library (Fork)" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -3025,7 +2877,6 @@ xmp = ["defusedxml"] name = "pkgutil-resolve-name" version = "1.3.10" description = "Resolve a name to an object." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -3037,7 +2888,6 @@ files = [ name = "platformdirs" version = "4.2.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
-category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -3053,7 +2903,6 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest- name = "prometheus-client" version = "0.17.1" description = "Python client for the Prometheus monitoring system." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -3068,7 +2917,6 @@ twisted = ["twisted"] name = "promise" version = "2.3" description = "Promises/A+ implementation for Python" -category = "main" optional = false python-versions = "*" files = [ @@ -3085,7 +2933,6 @@ test = ["coveralls", "futures", "mock", "pytest (>=2.7.3)", "pytest-benchmark", name = "prompt-toolkit" version = "3.0.43" description = "Library for building powerful interactive command lines in Python" -category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -3100,7 +2947,6 @@ wcwidth = "*" name = "psycopg2-binary" version = "2.9.9" description = "psycopg2 - Python-PostgreSQL Database Adapter" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3182,7 +3028,6 @@ files = [ name = "ptyprocess" version = "0.7.0" description = "Run a subprocess in a pseudo terminal" -category = "dev" optional = false python-versions = "*" files = [ @@ -3194,7 +3039,6 @@ files = [ name = "pure-eval" version = "0.2.2" description = "Safely evaluate AST nodes without side effects" -category = "dev" optional = false python-versions = "*" files = [ @@ -3207,21 +3051,19 @@ tests = ["pytest"] [[package]] name = "pycodestyle" -version = "2.11.1" +version = "2.9.1" description = "Python style guide checker" -category = "main" optional = false -python-versions = ">=3.8" +python-versions = ">=3.6" files = [ - {file = "pycodestyle-2.11.1-py2.py3-none-any.whl", hash = "sha256:44fe31000b2d866f2e41841b18528a505fbd7fef9017b04eff4e2648a0fadc67"}, - {file = "pycodestyle-2.11.1.tar.gz", hash = "sha256:41ba0e7afc9752dfb53ced5489e89f8186be00e599e712660695b7a75ff2663f"}, + {file = 
"pycodestyle-2.9.1-py2.py3-none-any.whl", hash = "sha256:d1735fc58b418fd7c5f658d28d943854f8a849b01a5d0a1e6f3f3fdd0166804b"}, + {file = "pycodestyle-2.9.1.tar.gz", hash = "sha256:2c9607871d58c76354b697b42f5d57e1ada7d261c261efac224b664affdc5785"}, ] [[package]] name = "pycparser" version = "2.22" description = "C parser in Python" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -3233,7 +3075,6 @@ files = [ name = "pydantic" version = "1.10.15" description = "Data validation and settings management using python type hints" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3286,7 +3127,6 @@ email = ["email-validator (>=1.0.3)"] name = "pyeapi" version = "1.0.2" description = "Python Client for eAPI" -category = "main" optional = false python-versions = "*" files = [ @@ -3302,21 +3142,19 @@ test = ["coverage", "mock"] [[package]] name = "pyflakes" -version = "3.2.0" +version = "2.5.0" description = "passive checker of Python programs" -category = "dev" optional = false -python-versions = ">=3.8" +python-versions = ">=3.6" files = [ - {file = "pyflakes-3.2.0-py2.py3-none-any.whl", hash = "sha256:84b5be138a2dfbb40689ca07e2152deb896a65c3a3e24c251c5c62489568074a"}, - {file = "pyflakes-3.2.0.tar.gz", hash = "sha256:1c61603ff154621fb2a9172037d84dca3500def8c8b630657d1701f026f8af3f"}, + {file = "pyflakes-2.5.0-py2.py3-none-any.whl", hash = "sha256:4579f67d887f804e67edb544428f264b7b24f435b263c4614f384135cea553d2"}, + {file = "pyflakes-2.5.0.tar.gz", hash = "sha256:491feb020dca48ccc562a8c0cbe8df07ee13078df59813b83959cbdada312ea3"}, ] [[package]] name = "pygments" version = "2.17.2" description = "Pygments is a syntax highlighting package written in Python." 
-category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3332,7 +3170,6 @@ windows-terminal = ["colorama (>=0.4.6)"] name = "pyjwt" version = "2.8.0" description = "JSON Web Token implementation in Python" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3350,7 +3187,6 @@ tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] name = "pylint" version = "3.1.0" description = "python code static checker" -category = "dev" optional = false python-versions = ">=3.8.0" files = [ @@ -3380,7 +3216,6 @@ testutils = ["gitpython (>3)"] name = "pylint-django" version = "2.5.5" description = "A Pylint plugin to help Pylint understand the Django web framework" -category = "dev" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -3399,7 +3234,6 @@ with-django = ["Django (>=2.2)"] name = "pylint-nautobot" version = "0.3.0" description = "Custom Pylint Rules for Nautobot" -category = "dev" optional = false python-versions = ">=3.8,<3.12" files = [ @@ -3417,7 +3251,6 @@ toml = ">=0.10.2" name = "pylint-plugin-utils" version = "0.8.2" description = "Utilities and helpers for writing Pylint plugins" -category = "dev" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -3432,7 +3265,6 @@ pylint = ">=1.7" name = "pymdown-extensions" version = "10.7.1" description = "Extension pack for Python Markdown." 
-category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -3451,7 +3283,6 @@ extra = ["pygments (>=2.12)"] name = "pynacl" version = "1.5.0" description = "Python binding to the Networking and Cryptography (NaCl) library" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -3478,7 +3309,6 @@ tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"] name = "pynautobot" version = "2.1.1" description = "Nautobot API client library" -category = "main" optional = false python-versions = "<4.0,>=3.8" files = [ @@ -3495,7 +3325,6 @@ urllib3 = ">=1.21.1,<1.27" name = "pyparsing" version = "3.1.2" description = "pyparsing module - Classes and methods to define and execute parsing grammars" -category = "main" optional = false python-versions = ">=3.6.8" files = [ @@ -3510,7 +3339,6 @@ diagrams = ["jinja2", "railroad-diagrams"] name = "pyserial" version = "3.5" description = "Python Serial Port Extension" -category = "main" optional = false python-versions = "*" files = [ @@ -3525,7 +3353,6 @@ cp2110 = ["hidapi"] name = "python-crontab" version = "3.0.0" description = "Python Crontab API" -category = "main" optional = false python-versions = "*" files = [ @@ -3544,7 +3371,6 @@ cron-schedule = ["croniter"] name = "python-dateutil" version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ @@ -3559,7 +3385,6 @@ six = ">=1.5" name = "python-slugify" version = "8.0.4" description = "A Python slugify application that also handles Unicode" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3577,7 +3402,6 @@ unidecode = ["Unidecode (>=1.1.1)"] name = "python3-openid" version = "3.2.0" description = "OpenID support for modern servers and consumers." 
-category = "main" optional = false python-versions = "*" files = [ @@ -3596,7 +3420,6 @@ postgresql = ["psycopg2"] name = "pytz" version = "2024.1" description = "World timezone definitions, modern and historical" -category = "main" optional = false python-versions = "*" files = [ @@ -3608,7 +3431,6 @@ files = [ name = "pyuwsgi" version = "2.0.23.post0" description = "The uWSGI server" -category = "main" optional = false python-versions = "*" files = [ @@ -3661,7 +3483,6 @@ files = [ name = "pyyaml" version = "6.0.1" description = "YAML parser and emitter for Python" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -3721,7 +3542,6 @@ files = [ name = "pyyaml-env-tag" version = "0.1" description = "A custom YAML tag for referencing environment variables in YAML files. " -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -3736,7 +3556,6 @@ pyyaml = "*" name = "redis" version = "5.0.3" description = "Python client for Redis database and key-value store" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3755,7 +3574,6 @@ ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==20.0.1)", "requests (>=2.26.0)" name = "referencing" version = "0.34.0" description = "JSON Referencing + Python" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -3771,7 +3589,6 @@ rpds-py = ">=0.7.0" name = "regex" version = "2023.12.25" description = "Alternative regular expression module, to replace re." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3874,7 +3691,6 @@ files = [ name = "requests" version = "2.31.0" description = "Python HTTP for Humans." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3896,7 +3712,6 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] name = "requests-oauthlib" version = "2.0.0" description = "OAuthlib authentication support for Requests." 
-category = "main" optional = false python-versions = ">=3.4" files = [ @@ -3915,7 +3730,6 @@ rsa = ["oauthlib[signedtoken] (>=3.0.0)"] name = "rich" version = "13.7.1" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" -category = "dev" optional = false python-versions = ">=3.7.0" files = [ @@ -3935,7 +3749,6 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"] name = "rpds-py" version = "0.18.0" description = "Python bindings to Rust's persistent data structures (rpds)" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -4044,7 +3857,6 @@ files = [ name = "ruamel-yaml" version = "0.18.6" description = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4063,7 +3875,6 @@ jinja2 = ["ruamel.yaml.jinja2 (>=0.2)"] name = "ruamel-yaml-clib" version = "0.2.8" description = "C version of reader, parser and emitter for ruamel.yaml derived from libyaml" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -4123,7 +3934,6 @@ files = [ name = "ruff" version = "0.3.5" description = "An extremely fast Python linter and code formatter, written in Rust." 
-category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -4150,7 +3960,6 @@ files = [ name = "rx" version = "1.6.3" description = "Reactive Extensions (Rx) for Python" -category = "main" optional = false python-versions = "*" files = [ @@ -4161,7 +3970,6 @@ files = [ name = "scp" version = "0.14.5" description = "scp module for paramiko" -category = "main" optional = false python-versions = "*" files = [ @@ -4176,7 +3984,6 @@ paramiko = "*" name = "setuptools" version = "69.2.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -4193,7 +4000,6 @@ testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jar name = "singledispatch" version = "4.1.0" description = "Backport functools.singledispatch to older Pythons." -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -4209,7 +4015,6 @@ testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", name = "six" version = "1.16.0" description = "Python 2 and 3 compatibility utilities" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -4221,7 +4026,6 @@ files = [ name = "smmap" version = "5.0.1" description = "A pure Python implementation of a sliding window memory map manager" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4233,7 +4037,6 @@ files = [ name = "sniffio" version = "1.3.1" description = "Sniff out which async library your code is running under" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4245,7 +4048,6 @@ files = [ name = "social-auth-app-django" version = "5.2.0" description = "Python Social Authentication, Django integration." 
-category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4261,7 +4063,6 @@ social-auth-core = ">=4.4.1" name = "social-auth-core" version = "4.5.3" description = "Python social authentication made simple." -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -4288,7 +4089,6 @@ saml = ["python3-saml (>=1.5.0)"] name = "sqlparse" version = "0.4.4" description = "A non-validating SQL parser." -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -4305,7 +4105,6 @@ test = ["pytest", "pytest-cov"] name = "stack-data" version = "0.6.3" description = "Extract data from python stack frames and tracebacks for informative displays" -category = "dev" optional = false python-versions = "*" files = [ @@ -4325,7 +4124,6 @@ tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"] name = "stevedore" version = "5.2.0" description = "Manage dynamic plugins for Python applications" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -4340,7 +4138,6 @@ pbr = ">=2.0.0,<2.1.0 || >2.1.0" name = "structlog" version = "22.3.0" description = "Structured Logging for Python" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4358,7 +4155,6 @@ typing = ["mypy", "rich", "twisted"] name = "svgwrite" version = "1.4.3" description = "A Python library to create SVG drawings." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -4370,7 +4166,6 @@ files = [ name = "text-unidecode" version = "1.3" description = "The most basic Text::Unidecode port" -category = "main" optional = false python-versions = "*" files = [ @@ -4382,7 +4177,6 @@ files = [ name = "textfsm" version = "1.1.3" description = "Python module for parsing semi-structured text into python tables." 
-category = "main" optional = false python-versions = "*" files = [ @@ -4398,7 +4192,6 @@ six = "*" name = "toml" version = "0.10.2" description = "Python Library for Tom's Obvious, Minimal Language" -category = "dev" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -4410,7 +4203,6 @@ files = [ name = "tomli" version = "2.0.1" description = "A lil' TOML parser" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4422,7 +4214,6 @@ files = [ name = "tomlkit" version = "0.12.4" description = "Style preserving TOML library" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -4434,7 +4225,6 @@ files = [ name = "towncrier" version = "23.6.0" description = "Building newsfiles for your project." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -4457,7 +4247,6 @@ dev = ["furo", "packaging", "sphinx (>=5)", "twisted"] name = "traitlets" version = "5.14.2" description = "Traitlets Python configuration system" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -4473,7 +4262,6 @@ test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0, name = "transitions" version = "0.9.0" description = "A lightweight, object-oriented Python state machine implementation with many extensions." 
-category = "main" optional = false python-versions = "*" files = [ @@ -4492,7 +4280,6 @@ test = ["pytest"] name = "ttp" version = "0.9.5" description = "Template Text Parser" -category = "main" optional = false python-versions = ">=2.7,<4.0" files = [ @@ -4508,7 +4295,6 @@ full = ["cerberus (>=1.3.0,<1.4.0)", "deepdiff (>=5.8.0,<5.9.0)", "jinja2 (>=3.0 name = "ttp-templates" version = "0.3.6" description = "Template Text Parser Templates collections" -category = "main" optional = false python-versions = ">=3.6,<4.0" files = [ @@ -4524,21 +4310,19 @@ docs = ["mkdocs (==1.2.4)", "mkdocs-material (==7.2.2)", "mkdocs-material-extens [[package]] name = "typing-extensions" -version = "4.10.0" +version = "4.11.0" description = "Backported and Experimental Type Hints for Python 3.8+" -category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, - {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, + {file = "typing_extensions-4.11.0-py3-none-any.whl", hash = "sha256:c1f94d72897edaf4ce775bb7558d5b79d8126906a14ea5ed1635921406c0387a"}, + {file = "typing_extensions-4.11.0.tar.gz", hash = "sha256:83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0"}, ] [[package]] name = "tzdata" version = "2024.1" description = "Provider of IANA time zone data" -category = "main" optional = false python-versions = ">=2" files = [ @@ -4550,7 +4334,6 @@ files = [ name = "uritemplate" version = "4.1.1" description = "Implementation of RFC 6570 URI Templates" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -4562,7 +4345,6 @@ files = [ name = "urllib3" version = "1.26.18" description = "HTTP library with thread-safe connection pooling, file post, and more." 
-category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ @@ -4579,7 +4361,6 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] name = "vine" version = "5.1.0" description = "Python promises." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -4591,7 +4372,6 @@ files = [ name = "watchdog" version = "4.0.0" description = "Filesystem events monitoring" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -4633,7 +4413,6 @@ watchmedo = ["PyYAML (>=3.10)"] name = "wcwidth" version = "0.2.13" description = "Measures the displayed width of unicode strings in a terminal" -category = "main" optional = false python-versions = "*" files = [ @@ -4645,7 +4424,6 @@ files = [ name = "wheel" version = "0.43.0" description = "A built-package format for Python" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -4660,7 +4438,6 @@ test = ["pytest (>=6.0.0)", "setuptools (>=65)"] name = "yamllint" version = "1.35.1" description = "A linter for YAML files." -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -4679,7 +4456,6 @@ dev = ["doc8", "flake8", "flake8-import-order", "rstcheck[sphinx]", "sphinx"] name = "yamlordereddictloader" version = "0.4.2" description = "YAML loader and dumper for PyYAML allowing to keep keys order." 
-category = "main" optional = false python-versions = "*" files = [ @@ -4694,7 +4470,6 @@ pyyaml = "*" name = "zipp" version = "3.18.1" description = "Backport of pathlib-compatible object wrapper for zip files" -category = "main" optional = false python-versions = ">=3.8" files = [ From c598539eb00194a149bae8f9af88441bf55404b3 Mon Sep 17 00:00:00 2001 From: David Cates Date: Fri, 5 Apr 2024 14:54:28 -0700 Subject: [PATCH 197/225] remove unnecessary logging --- .../diffsync/adapters/network_importer_adapters.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py index ab605a78..49eb64c9 100644 --- a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py @@ -253,17 +253,14 @@ def load(self): raise ValueError("'top_level' needs to be set on the class.") self._cache_primary_ips(device_queryset=self.job.devices_to_load) - self.job.logger.warning("Called 1") for model_name in self.top_level: if model_name == "ip_address": self.load_ip_addresses() elif model_name == "vlan": if self.job.sync_vlans: - self.job.logger.warning("Called 2") self.load_vlans() elif model_name == "vrf": if self.job.sync_vrfs: - self.job.logger.warning("Called 3") self.load_vrfs() elif model_name == "tagged_vlans_to_interface": if self.job.sync_vlans: From 043fd4dd6f5f6c0807ad50cb432719764cefcf0e Mon Sep 17 00:00:00 2001 From: Susan Hooks Date: Mon, 8 Apr 2024 22:37:44 +0000 Subject: [PATCH 198/225] updates for VRF support --- .../command_mappers/cisco_nxos.yml | 6 +- .../nornir_plays/command_getter.py | 3 +- .../nornir_plays/formatter.py | 102 ++++++++++++------ 3 files changed, 72 insertions(+), 39 deletions(-) diff --git a/nautobot_device_onboarding/command_mappers/cisco_nxos.yml b/nautobot_device_onboarding/command_mappers/cisco_nxos.yml index 
a24cad2f..359115d2 100755 --- a/nautobot_device_onboarding/command_mappers/cisco_nxos.yml +++ b/nautobot_device_onboarding/command_mappers/cisco_nxos.yml @@ -2,9 +2,9 @@ device_onboarding: hostname: commands: - command: "show version" - use_textfsm: true - jpath: "[*].hostname" + - command: "show version" + use_textfsm: true + jpath: "[*].hostname" serial: commands: - command: "show inventory" diff --git a/nautobot_device_onboarding/nornir_plays/command_getter.py b/nautobot_device_onboarding/nornir_plays/command_getter.py index bb9eb727..b5977248 100755 --- a/nautobot_device_onboarding/nornir_plays/command_getter.py +++ b/nautobot_device_onboarding/nornir_plays/command_getter.py @@ -62,6 +62,7 @@ def _get_commands_to_run(yaml_parsed_info): if isinstance(nested_command_info, dict): for command in nested_command_info["commands"]: all_commands.append(command) + print(f"all_commands: {all_commands}") return deduplicate_command_list(all_commands) @@ -218,5 +219,5 @@ def command_getter_ni(job_result, log_level, kwargs): logger.info("Error: %s", err) return err compiled_results = format_results(compiled_results) - print(f"compiled_results: {compiled_results}") + return compiled_results diff --git a/nautobot_device_onboarding/nornir_plays/formatter.py b/nautobot_device_onboarding/nornir_plays/formatter.py index ebfc24d3..40800a9b 100755 --- a/nautobot_device_onboarding/nornir_plays/formatter.py +++ b/nautobot_device_onboarding/nornir_plays/formatter.py @@ -1,13 +1,12 @@ """Formatter.""" import json - from django.template import engines from django.utils.module_loading import import_string from jdiff import extract_data_from_json from jinja2.sandbox import SandboxedEnvironment -from nautobot.dcim.models import Device from netutils.interface import canonical_interface_name +from nautobot.dcim.models import Device from nautobot_device_onboarding.constants import INTERFACE_TYPE_MAP_STATIC @@ -93,13 +92,13 @@ def extract_show_data(host, multi_result, command_getter_type): 
final_result_dict = {} for default_dict_field, command_info in command_jpaths[command_getter_type].items(): if command_info.get("commands"): - # Means their isn't any "nested" structures. Therefore not expected to see "validator_pattern key" + # Means there isn't any "nested" structures. Therefore not expected to see "validator_pattern key" if isinstance(command_info["commands"], dict): command_info["commands"] = [command_info["commands"]] result = perform_data_extraction(host, default_dict_field, command_info, jinja_env, multi_result[0]) final_result_dict.update(result) else: - # Means their is a "nested" structures. Priority + # Means there is a "nested" structures. Priority for dict_field, nested_command_info in command_info.items(): if isinstance(nested_command_info["commands"], dict): nested_command_info["commands"] = [nested_command_info["commands"]] @@ -150,6 +149,7 @@ def format_ios_results(device): vrf_list = [] else: vrf_list = ensure_list(vrfs) + interface_dict = {} for item in mtu_list: interface_dict.setdefault(item["interface"], {})["mtu"] = item["mtu"] @@ -170,18 +170,13 @@ def format_ios_results(device): interface_dict.setdefault(item["interface"], {})["link_status"] = ( True if item["link_status"] == "up" else False ) + + # Add default values to interface + default_values = {"lag": "", "untagged_vlan": {}, "tagged_vlans": [], "vrf": {}, "802.1Q_mode": ""} + for interface in interface_dict.values(): - interface.setdefault("vrf", {}) - for vrf in vrf_list: - for interface in vrf["interfaces"]: - canonical_name = canonical_interface_name(interface) - interface_dict.setdefault(canonical_name, {}) - interface_dict[canonical_name]["vrf"] = {"name": vrf["name"], "rd": vrf["default_rd"]} - for interface in interface_dict.values(): - interface.setdefault("802.1Q_mode", "") - interface.setdefault("lag", "") - interface.setdefault("untagged_vlan", {}) - interface.setdefault("tagged_vlans", []) + for key, default in default_values.items(): + 
interface.setdefault(key, default) for interface, data in interface_dict.items(): ip_addresses = data.get("ip_addresses", {}) @@ -193,8 +188,25 @@ def format_ios_results(device): for interface_name, interface_info in interface_dict.items(): interface_list.append({canonical_interface_name(interface_name): interface_info}) + # Change VRF interface names to canonical and create dict of interfaces + for vrf in vrf_list: + for interface in vrf["interfaces"]: + canonical_name = canonical_interface_name(interface) + if canonical_name.startswith("VLAN"): + canonical_name = canonical_name.replace("VLAN", "Vlan", 1) + interface_dict.setdefault(canonical_name, {}) + if vrf["default_rd"] in ["", ":"]: + + interface_dict[canonical_name]["vrf"] = {} + else: + interface_dict[canonical_name]["vrf"] = { + "name": vrf["name"], + "rd": vrf["default_rd"], + } + device["interfaces"] = interface_list device["serial"] = serial + try: del device["mtu"] del device["type"] @@ -213,22 +225,6 @@ def format_ios_results(device): return device -def format_nxos_vrf_results(device): - """Format the show commands to get interface and rd.""" - try: - vrf_interface_list = device.get("vrf_interfaces", []) - vrf_rd_list = device.get("vrf_rds", []) - - # dict2 = {item["id"]: item for item in list2} # jeff commented out since it wasn't used at all. 
- - for id in vrf_interface_list: - id.update(vrf_rd_list.get(id["id"], {})) - print(f"vrf_interface_list {vrf_interface_list}") - except Exception: - device = {"failed": True, "failed_reason": f"Formatting error for device {device}"} - return vrf_interface_list - - def format_nxos_results(device): """Format the results of the show commands for NX-OS devices.""" try: @@ -243,7 +239,7 @@ def format_nxos_results(device): modes = device.get("mode", []) vrfs_rd = device.get("vrf_rds", []) vrfs_interfaces = device.get("vrf_interfaces", []) - print(f"vrfs_rd {vrfs_rd}, vrf_interfaces {vrfs_interfaces}") + mtu_list = ensure_list(mtus) type_list = ensure_list(types) ip_list = ensure_list(ips) @@ -253,6 +249,15 @@ def format_nxos_results(device): link_status_list = ensure_list(link_statuses) mode_list = ensure_list(modes) + if vrfs_rd is None: + vrfs_rds = [] + else: + vrfs_rds = ensure_list(vrfs_rd) + if vrfs_interfaces is None: + vrfs_interfaces = [] + else: + vrfs_interfaces = ensure_list(vrfs_interfaces) + interface_dict = {} for item in mtu_list: interface_dict.setdefault(item["interface"], {})["mtu"] = item["mtu"] @@ -278,10 +283,11 @@ def format_nxos_results(device): "access" if item["mode"] == "access" else "tagged" if item["mode"] == "trunk" else "" ) + default_values = {"lag": "", "untagged_vlan": {}, "tagged_vlans": [], "vrf": {}} + for interface in interface_dict.values(): - interface.setdefault("lag", "") - interface.setdefault("untagged_vlan", {}) - interface.setdefault("tagged_vlans", []) + for key, default in default_values.items(): + interface.setdefault(key, default) for interface, data in interface_dict.items(): ip_addresses = data.get("ip_addresses", {}) @@ -293,6 +299,29 @@ def format_nxos_results(device): for interface_name, interface_info in interface_dict.items(): interface_list.append({canonical_interface_name(interface_name): interface_info}) + # Populate vrf_dict from commands and add to interface_dict + vrf_dict = {vrf["id"]: vrf for vrf in 
vrfs_rds} + + for interface in vrfs_interfaces: + vrf_id = interface["id"] + if "interfaces" not in vrf_dict[vrf_id]: + vrf_dict[vrf_id]["interfaces"] = [] + vrf_dict[vrf_id]["interfaces"].append(interface["interface"]) + + vrf_list = list(vrf_dict.values()) + for vrf in vrf_list: + if "interfaces" in vrf: + for interface in vrf["interfaces"]: + canonical_name = canonical_interface_name(interface) + if canonical_name.startswith("VLAN"): + canonical_name = canonical_name.replace("VLAN", "Vlan", 1) + interface_dict.setdefault(canonical_name, {}) + if vrf["default_rd"] == "0:0": + print(f"RD {vrf['name']} configured but no route set for interface {canonical_name}.") + interface_dict[canonical_name]["vrf"] = {} + else: + interface_dict[canonical_name]["vrf"] = {"name": vrf["name"], "rd": vrf["default_rd"]} + device["interfaces"] = interface_list device["serial"] = serial try: @@ -304,6 +333,9 @@ def format_nxos_results(device): del device["description"] del device["link_status"] del device["mode"] + del device["vrf_rds"] + del device["vrf_interfaces"] + except KeyError: device = {"failed": True, "failed_reason": f"Formatting error for device {device}"} except Exception: From 8095902d4bfc4fc3078921699ae6a0ad613ca28d Mon Sep 17 00:00:00 2001 From: Susan Hooks Date: Tue, 9 Apr 2024 17:54:54 +0000 Subject: [PATCH 199/225] removed exception for vrf without rd --- .../nornir_plays/formatter.py | 18 +++++------------- 1 file changed, 5 insertions(+), 13 deletions(-) diff --git a/nautobot_device_onboarding/nornir_plays/formatter.py b/nautobot_device_onboarding/nornir_plays/formatter.py index 40800a9b..cb7b4d5c 100755 --- a/nautobot_device_onboarding/nornir_plays/formatter.py +++ b/nautobot_device_onboarding/nornir_plays/formatter.py @@ -195,14 +195,10 @@ def format_ios_results(device): if canonical_name.startswith("VLAN"): canonical_name = canonical_name.replace("VLAN", "Vlan", 1) interface_dict.setdefault(canonical_name, {}) - if vrf["default_rd"] in ["", ":"]: - - 
interface_dict[canonical_name]["vrf"] = {} - else: - interface_dict[canonical_name]["vrf"] = { - "name": vrf["name"], - "rd": vrf["default_rd"], - } + interface_dict[canonical_name]["vrf"] = { + "name": vrf["name"], + "rd": vrf["default_rd"], + } device["interfaces"] = interface_list device["serial"] = serial @@ -316,11 +312,7 @@ def format_nxos_results(device): if canonical_name.startswith("VLAN"): canonical_name = canonical_name.replace("VLAN", "Vlan", 1) interface_dict.setdefault(canonical_name, {}) - if vrf["default_rd"] == "0:0": - print(f"RD {vrf['name']} configured but no route set for interface {canonical_name}.") - interface_dict[canonical_name]["vrf"] = {} - else: - interface_dict[canonical_name]["vrf"] = {"name": vrf["name"], "rd": vrf["default_rd"]} + interface_dict[canonical_name]["vrf"] = {"name": vrf["name"], "rd": vrf["default_rd"]} device["interfaces"] = interface_list device["serial"] = serial From 105fd4d16eb83608b0bc6b94aac02ab58c0e2675 Mon Sep 17 00:00:00 2001 From: Jeff Kala Date: Tue, 9 Apr 2024 13:00:17 -0500 Subject: [PATCH 200/225] first DO for WLC --- .../command_mappers/cisco_wlc_ssh.yml | 27 +++++++++++++++++++ 1 file changed, 27 insertions(+) create mode 100755 nautobot_device_onboarding/command_mappers/cisco_wlc_ssh.yml diff --git a/nautobot_device_onboarding/command_mappers/cisco_wlc_ssh.yml b/nautobot_device_onboarding/command_mappers/cisco_wlc_ssh.yml new file mode 100755 index 00000000..6104cbd3 --- /dev/null +++ b/nautobot_device_onboarding/command_mappers/cisco_wlc_ssh.yml @@ -0,0 +1,27 @@ +--- +device_onboarding: + hostname: + commands: + - command: "show sysinfo" + use_textfsm: true + jpath: "[*].system_name" + serial: + commands: + - command: "show inventory" + use_textfsm: true + jpath: "[*].sn" + device_type: + commands: + - command: "show inventory" + use_textfsm: true + jpath: "[*].pid" + mgmt_interface: + commands: + - command: "show interface detailed management" + use_textfsm: true + jpath: "[*].interface_name" + 
mask_length: + commands: + - command: "show interface detailed management" + use_textfsm: true + jpath: "[*].netmask" From e97fc9e95227e8f4283aa3b1441d2e83c8740ff0 Mon Sep 17 00:00:00 2001 From: Susan Hooks Date: Tue, 9 Apr 2024 18:42:43 +0000 Subject: [PATCH 201/225] added 802.1q support for ios --- .../command_mappers/cisco_ios.yml | 5 +++++ .../command_mappers/cisco_xe.yml | 5 +++++ .../nornir_plays/formatter.py | 19 ++++++++++++++++++- 3 files changed, 28 insertions(+), 1 deletion(-) diff --git a/nautobot_device_onboarding/command_mappers/cisco_ios.yml b/nautobot_device_onboarding/command_mappers/cisco_ios.yml index 4a550896..eb2705ce 100755 --- a/nautobot_device_onboarding/command_mappers/cisco_ios.yml +++ b/nautobot_device_onboarding/command_mappers/cisco_ios.yml @@ -80,3 +80,8 @@ network_importer: - command: "show vrf" use_textfsm: true jpath: "[*].{name: name, default_rd: default_rd, interfaces: interfaces}" + mode: + commands: + - command: "show interface status" + use_textfsm: true + jpath: "[*].{interface: port, vlan_id: vlan_id}" \ No newline at end of file diff --git a/nautobot_device_onboarding/command_mappers/cisco_xe.yml b/nautobot_device_onboarding/command_mappers/cisco_xe.yml index 4a550896..d4ec8334 100755 --- a/nautobot_device_onboarding/command_mappers/cisco_xe.yml +++ b/nautobot_device_onboarding/command_mappers/cisco_xe.yml @@ -80,3 +80,8 @@ network_importer: - command: "show vrf" use_textfsm: true jpath: "[*].{name: name, default_rd: default_rd, interfaces: interfaces}" + mode: + commands: + - command: "show interface status" + use_textfsm: true + jpath: "[*].{interface: port, vlan_id: vlan_id}" diff --git a/nautobot_device_onboarding/nornir_plays/formatter.py b/nautobot_device_onboarding/nornir_plays/formatter.py index cb7b4d5c..55526add 100755 --- a/nautobot_device_onboarding/nornir_plays/formatter.py +++ b/nautobot_device_onboarding/nornir_plays/formatter.py @@ -136,6 +136,7 @@ def format_ios_results(device): descriptions = 
device.get("description", []) link_statuses = device.get("link_status", []) vrfs = device.get("vrfs", []) + mode = device.get("mode", []) # Some data may come across as a dict, needs to be list. Probably should do this elsewhere. mtu_list = ensure_list(mtus) @@ -145,6 +146,8 @@ def format_ios_results(device): mac_list = ensure_list(macs) description_list = ensure_list(descriptions) link_status_list = ensure_list(link_statuses) + mode_list = ensure_list(mode) + if vrfs is None: vrf_list = [] else: @@ -172,7 +175,20 @@ def format_ios_results(device): ) # Add default values to interface - default_values = {"lag": "", "untagged_vlan": {}, "tagged_vlans": [], "vrf": {}, "802.1Q_mode": ""} + default_values = { + "lag": "", + "untagged_vlan": {}, + "tagged_vlans": [], + "vrf": {}, + "802.1Q_mode": "", + } + + for item in mode_list: + print(f"item: {item}, {mode_list}") + canonical_name = canonical_interface_name(item["interface"]) + print(canonical_name) + interface_dict.setdefault(canonical_name, {}) + interface_dict[canonical_name]["802.1Q_mode"] = "tagged" if item["vlan_id"] == "trunk" else "access" for interface in interface_dict.values(): for key, default in default_values.items(): @@ -212,6 +228,7 @@ def format_ios_results(device): del device["description"] del device["link_status"] del device["vrfs"] + del device["mode"] except KeyError: device = {"failed": True, "failed_reason": f"Formatting error 2 for device {device}"} From 5bda142eb7070e9b82b3977f9cb1a7018b85a24a Mon Sep 17 00:00:00 2001 From: Jeff Kala Date: Tue, 9 Apr 2024 16:14:24 -0500 Subject: [PATCH 202/225] fix / add needed info for wlc --- .../command_mappers/{cisco_wlc_ssh.yml => cisco_wlc.yml} | 0 nautobot_device_onboarding/constants.py | 1 + 2 files changed, 1 insertion(+) rename nautobot_device_onboarding/command_mappers/{cisco_wlc_ssh.yml => cisco_wlc.yml} (100%) diff --git a/nautobot_device_onboarding/command_mappers/cisco_wlc_ssh.yml b/nautobot_device_onboarding/command_mappers/cisco_wlc.yml 
similarity index 100% rename from nautobot_device_onboarding/command_mappers/cisco_wlc_ssh.yml rename to nautobot_device_onboarding/command_mappers/cisco_wlc.yml diff --git a/nautobot_device_onboarding/constants.py b/nautobot_device_onboarding/constants.py index d278c9c8..cd5eb4ed 100644 --- a/nautobot_device_onboarding/constants.py +++ b/nautobot_device_onboarding/constants.py @@ -14,6 +14,7 @@ "arista_eos": "eos", "juniper_junos": "junos", "cisco_xr": "iosxr", + "cisco_wlc": "cisco_wlc", } # This should potentially be removed and used nautobot core directly choices. From 498029e60745090a5d3ebf7efc17daddab0f2320 Mon Sep 17 00:00:00 2001 From: Susan Hooks Date: Wed, 10 Apr 2024 22:35:16 +0000 Subject: [PATCH 203/225] updates for vlans ios --- .../command_mappers/cisco_ios.yml | 16 ++++-- .../command_mappers/cisco_xe.yml | 16 ++++-- .../nornir_plays/formatter.py | 50 ++++++++++++++++--- 3 files changed, 70 insertions(+), 12 deletions(-) diff --git a/nautobot_device_onboarding/command_mappers/cisco_ios.yml b/nautobot_device_onboarding/command_mappers/cisco_ios.yml index eb2705ce..5086c7d2 100755 --- a/nautobot_device_onboarding/command_mappers/cisco_ios.yml +++ b/nautobot_device_onboarding/command_mappers/cisco_ios.yml @@ -80,8 +80,18 @@ network_importer: - command: "show vrf" use_textfsm: true jpath: "[*].{name: name, default_rd: default_rd, interfaces: interfaces}" - mode: + # mode: + # commands: + # - command: "show interfaces switchport" + # use_textfsm: true + # jpath: "[*].{interface: interface, mode: mode, admin_mode: admin_mode}" + vlans: commands: - - command: "show interface status" + - command: "show vlan" use_textfsm: true - jpath: "[*].{interface: port, vlan_id: vlan_id}" \ No newline at end of file + jpath: "[*].{interfaces: interfaces, vlan_name: vlan_name, vlan_id: vlan_id, status: status}" + interface_vlans: + commands: + - command: "show interfaces switchport" + use_textfsm: true + jpath: "[*].{interface: interface, admin_mode: admin_mode, 
access_vlan: access_vlan, native_vlan: native_vlan, trunking_vlans: trunking_vlans, voice_vlan: voice_vlan}" \ No newline at end of file diff --git a/nautobot_device_onboarding/command_mappers/cisco_xe.yml b/nautobot_device_onboarding/command_mappers/cisco_xe.yml index d4ec8334..b0f3094d 100755 --- a/nautobot_device_onboarding/command_mappers/cisco_xe.yml +++ b/nautobot_device_onboarding/command_mappers/cisco_xe.yml @@ -80,8 +80,18 @@ network_importer: - command: "show vrf" use_textfsm: true jpath: "[*].{name: name, default_rd: default_rd, interfaces: interfaces}" - mode: + # mode: + # commands: + # - command: "show interfaces switchport" + # use_textfsm: true + # jpath: "[*].{interface: interface, mode: mode, admin_mode: admin_mode}" + vlans: commands: - - command: "show interface status" + - command: "show vlan" use_textfsm: true - jpath: "[*].{interface: port, vlan_id: vlan_id}" + jpath: "[*].{interfaces: interfaces, vlan_name: vlan_name, vlan_id: vlan_id, status: status}" + interface_vlans: + commands: + - command: "show interfaces switchport" + use_textfsm: true + jpath: "[*].{interface: interface, admin_mode: admin_mode, access_vlan: access_vlan, native_vlan: native_vlan, trunking_vlans: trunking_vlans, voice_vlan: voice_vlan}" diff --git a/nautobot_device_onboarding/nornir_plays/formatter.py b/nautobot_device_onboarding/nornir_plays/formatter.py index 55526add..62e0cf56 100755 --- a/nautobot_device_onboarding/nornir_plays/formatter.py +++ b/nautobot_device_onboarding/nornir_plays/formatter.py @@ -136,7 +136,8 @@ def format_ios_results(device): descriptions = device.get("description", []) link_statuses = device.get("link_status", []) vrfs = device.get("vrfs", []) - mode = device.get("mode", []) + vlans = device.get("vlans", []) + interface_vlans = device.get("interface_vlans", []) # Some data may come across as a dict, needs to be list. Probably should do this elsewhere. 
mtu_list = ensure_list(mtus) @@ -146,7 +147,8 @@ def format_ios_results(device): mac_list = ensure_list(macs) description_list = ensure_list(descriptions) link_status_list = ensure_list(link_statuses) - mode_list = ensure_list(mode) + vlan_list = ensure_list(vlans) + interface_vlan_list = ensure_list(interface_vlans) if vrfs is None: vrf_list = [] @@ -183,12 +185,48 @@ def format_ios_results(device): "802.1Q_mode": "", } - for item in mode_list: - print(f"item: {item}, {mode_list}") + vlan_map = {vlan["vlan_id"]: vlan["vlan_name"] for vlan in vlan_list} + + for item in interface_vlan_list: canonical_name = canonical_interface_name(item["interface"]) - print(canonical_name) interface_dict.setdefault(canonical_name, {}) - interface_dict[canonical_name]["802.1Q_mode"] = "tagged" if item["vlan_id"] == "trunk" else "access" + mode = item["admin_mode"] + trunking_vlans = item["trunking_vlans"] + + if mode == "trunk" and trunking_vlans == ["ALL"]: + interface_dict[canonical_name]["802.1Q_mode"] = "tagged-all" + interface_dict[canonical_name]["untagged_vlan"] = { + "name": vlan_map[item["native_vlan"]], + "id": item["native_vlan"], + } + elif mode == "static access": + interface_dict[canonical_name]["802.1Q_mode"] = "access" + interface_dict[canonical_name]["untagged_vlan"] = { + "name": vlan_map[item["access_vlan"]], + "id": item["access_vlan"], + } + elif mode == "trunk" and trunking_vlans != "['ALL']": + interface_dict[canonical_name]["802.1Q_mode"] = "tagged" + tagged_vlans = [] + for vlan_id in trunking_vlans[0].split(","): + if "-" in vlan_id: + start, end = map(int, vlan_id.split("-")) + for id in range(start, end + 1): + if str(id) not in vlan_map: + print(f"Error: VLAN {id} found on interface, but not found in vlan db.") + else: + tagged_vlans.append({"name": vlan_map[str(id)], "id": str(id)}) + if vlan_id not in vlan_map: + print(f"Error: VLAN {vlan_id} found on interface, but not found in vlan db.") + else: + tagged_vlans.append({"name": vlan_map[vlan_id], "id": 
vlan_id}) + interface_dict[canonical_name]["tagged_vlans"] = tagged_vlans + interface_dict[canonical_name]["untagged_vlan"] = { + "name": vlan_map[item["native_vlan"]], + "id": item["native_vlan"], + } + else: + interface_dict[canonical_name]["802.1Q_mode"] = "" for interface in interface_dict.values(): for key, default in default_values.items(): From 5229b44a72ee40d16e29997bd2a2ac1a3689f55f Mon Sep 17 00:00:00 2001 From: David Cates Date: Thu, 11 Apr 2024 10:33:29 -0700 Subject: [PATCH 204/225] adjust vrf adapter to load an RD of "None" if not present --- .../diffsync/adapters/network_importer_adapters.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py index 49eb64c9..1523ab46 100644 --- a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py @@ -213,7 +213,7 @@ def load_vrfs(self): network_vrf = self.vrf( diffsync=self, name=vrf.name, - rd=vrf.rd if vrf.rd else "", + rd=vrf.rd if vrf.rd else None, namespace__name=vrf.namespace.name, ) try: @@ -452,6 +452,8 @@ def load_interface(self, hostname, interface_name, interface_data): def load_ip_addresses(self): """Load IP addresses into the DiffSync store.""" for hostname, device_data in self.job.command_getter_result.items(): # pylint: disable=too-many-nested-blocks + if self.job.debug: + self.job.logger.debug(f"Loading IP Addresses from {hostname}") for interface in device_data["interfaces"]: for interface_name, interface_data in interface.items(): for ip_address in interface_data["ip_addresses"]: @@ -483,6 +485,8 @@ def load_vlans(self): location_names[device.name] = device.location.name for hostname, device_data in self.job.command_getter_result.items(): # pylint: disable=too-many-nested-blocks + if self.job.debug: + 
self.job.logger.debug(f"Loading Vlans from {hostname}") for interface in device_data["interfaces"]: for _, interface_data in interface.items(): # add tagged vlans @@ -517,13 +521,15 @@ def load_vlans(self): def load_vrfs(self): """Load vrfs into the Diffsync store.""" for hostname, device_data in self.job.command_getter_result.items(): # pylint: disable=too-many-nested-blocks + if self.job.debug: + self.job.logger.debug(f"Loading Vrfs from {hostname}") for interface in device_data["interfaces"]: for _, interface_data in interface.items(): if interface_data["vrf"]: network_vrf = self.vrf( diffsync=self, name=interface_data["vrf"]["name"], - rd=interface_data["vrf"]["rd"] if interface_data["vrf"]["rd"] else "", + rd=interface_data["vrf"]["rd"] if interface_data["vrf"]["rd"] else None, namespace__name=self.job.namespace.name, ) try: From a038168fb04b1618bff98077579df66a6b5e309f Mon Sep 17 00:00:00 2001 From: Susan Hooks Date: Thu, 11 Apr 2024 18:08:53 +0000 Subject: [PATCH 205/225] update vrf model --- .../nornir_plays/formatter.py | 32 +++++++++++++++---- 1 file changed, 25 insertions(+), 7 deletions(-) diff --git a/nautobot_device_onboarding/nornir_plays/formatter.py b/nautobot_device_onboarding/nornir_plays/formatter.py index 62e0cf56..bfd1af9c 100755 --- a/nautobot_device_onboarding/nornir_plays/formatter.py +++ b/nautobot_device_onboarding/nornir_plays/formatter.py @@ -176,7 +176,7 @@ def format_ios_results(device): True if item["link_status"] == "up" else False ) - # Add default values to interface + # Add default values to interfaces default_values = { "lag": "", "untagged_vlan": {}, @@ -249,11 +249,17 @@ def format_ios_results(device): if canonical_name.startswith("VLAN"): canonical_name = canonical_name.replace("VLAN", "Vlan", 1) interface_dict.setdefault(canonical_name, {}) - interface_dict[canonical_name]["vrf"] = { - "name": vrf["name"], - "rd": vrf["default_rd"], - } - + if vrf["default_rd"] == "": + interface_dict[canonical_name]["vrf"] = { + "name": 
vrf["name"], + "rd": "", + } + else: + interface_dict[canonical_name]["vrf"] = { + "name": vrf["name"], + "rd": vrf["default_rd"], + } + print(f"interface_dict: {interface_dict}") device["interfaces"] = interface_list device["serial"] = serial @@ -290,6 +296,8 @@ def format_nxos_results(device): modes = device.get("mode", []) vrfs_rd = device.get("vrf_rds", []) vrfs_interfaces = device.get("vrf_interfaces", []) + vlans = device.get("vlans", []) + interface_vlans = device.get("interface_vlans", []) mtu_list = ensure_list(mtus) type_list = ensure_list(types) @@ -299,6 +307,8 @@ def format_nxos_results(device): description_list = ensure_list(descriptions) link_status_list = ensure_list(link_statuses) mode_list = ensure_list(modes) + vlan_list = ensure_list(vlans) + interface_vlan_list = ensure_list(interface_vlans) if vrfs_rd is None: vrfs_rds = [] @@ -336,6 +346,8 @@ def format_nxos_results(device): default_values = {"lag": "", "untagged_vlan": {}, "tagged_vlans": [], "vrf": {}} + vlan_map = {vlan["vlan_id"]: vlan["vlan_name"] for vlan in vlan_list} + for interface in interface_dict.values(): for key, default in default_values.items(): interface.setdefault(key, default) @@ -367,7 +379,13 @@ def format_nxos_results(device): if canonical_name.startswith("VLAN"): canonical_name = canonical_name.replace("VLAN", "Vlan", 1) interface_dict.setdefault(canonical_name, {}) - interface_dict[canonical_name]["vrf"] = {"name": vrf["name"], "rd": vrf["default_rd"]} + if vrf["default_rd"] == "0:0": + interface_dict[canonical_name]["vrf"] = { + "name": vrf["name"], + "rd": "", + } + else: + interface_dict[canonical_name]["vrf"] = {"name": vrf["name"], "rd": vrf["default_rd"]} device["interfaces"] = interface_list device["serial"] = serial From 5504ed10fc459e9a92c93cc00c96f3e17a8ac116 Mon Sep 17 00:00:00 2001 From: David Cates Date: Thu, 11 Apr 2024 12:39:37 -0700 Subject: [PATCH 206/225] update vrf model for NI --- .../diffsync/models/network_importer_models.py | 9 +++++---- 
nautobot_device_onboarding/jobs.py | 2 +- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/nautobot_device_onboarding/diffsync/models/network_importer_models.py b/nautobot_device_onboarding/diffsync/models/network_importer_models.py index 8149732c..1f7372be 100644 --- a/nautobot_device_onboarding/diffsync/models/network_importer_models.py +++ b/nautobot_device_onboarding/diffsync/models/network_importer_models.py @@ -1,6 +1,6 @@ """Diffsync models.""" -from typing import List, Optional +from typing import List, Optional, Union from diffsync import DiffSync, DiffSyncModel from diffsync import exceptions as diffsync_exceptions @@ -459,9 +459,10 @@ class NetworkImporterVRF(FilteredNautobotModel): _modelname = "vrf" _model = VRF - _identifiers = ("rd", "name", "namespace__name") + _identifiers = ("name", "namespace__name") + _attributes = ("rd",) - rd: str + rd: Union[str, None] name: str namespace__name: str @@ -484,7 +485,7 @@ def _get_and_assign_vrf(cls, diffsync, attrs, interface): try: vrf = VRF.objects.get( name=attrs["vrf"]["name"], - rd=attrs["vrf"]["rd"], + rd=attrs["vrf"]["rd"] if attrs["vrf"]["rd"] else None, namespace=diffsync.job.namespace, ) except ObjectDoesNotExist: diff --git a/nautobot_device_onboarding/jobs.py b/nautobot_device_onboarding/jobs.py index 3f047763..204c0181 100755 --- a/nautobot_device_onboarding/jobs.py +++ b/nautobot_device_onboarding/jobs.py @@ -528,7 +528,7 @@ class Meta: name = "Sync Network Data" description = ( "Synchronize extended device attribute information into Nautobot; " - "including Interfaces, IPAddresses, Prefixes, Vlans and Cables." + "including Interfaces, IPAddresses, Prefixes, Vlans and Vrfs." 
) debug = BooleanVar(description="Enable for more verbose logging.") From b2ecbac8049b57b810a4460eadb4c30fbbdd914d Mon Sep 17 00:00:00 2001 From: Jeff Kala Date: Fri, 12 Apr 2024 12:56:47 -0500 Subject: [PATCH 207/225] add schema and some more new filters and cleanups --- nautobot_device_onboarding/jinja_filters.py | 64 ++++++++++- .../nornir_plays/command_getter.py | 14 +-- .../nornir_plays/empty_inventory.py | 3 +- .../nornir_plays/formatter.py | 4 +- .../nornir_plays/inventory_creator.py | 8 +- .../nornir_plays/schemas.py | 105 ++++++++++++++++++ .../nornir_plays/transform.py | 1 - 7 files changed, 179 insertions(+), 20 deletions(-) create mode 100755 nautobot_device_onboarding/nornir_plays/schemas.py diff --git a/nautobot_device_onboarding/jinja_filters.py b/nautobot_device_onboarding/jinja_filters.py index a9270526..59ebf9a6 100755 --- a/nautobot_device_onboarding/jinja_filters.py +++ b/nautobot_device_onboarding/jinja_filters.py @@ -28,9 +28,67 @@ def fix_interfaces(interfaces): int_values["ip_addresses"].append( {"ip_address": int_values.get("ip_address", ""), "prefix_length": int_values.get("prefix_length", "")} ) - if "up" in int_values["link_status"]: - int_values["link_status"] = True + if "up" in int_values["jeff_status"]: + int_values["jeff_status"] = True else: - int_values["link_status"] = False + int_values["jeff_status"] = False return interfaces + + +@library.filter +def collapse_list_to_dict(original_data): + """Takes a list of dictionaries and creates a dictionary based on outtermost key + + Args: + original_data (list): list of dictionaries + root_key (str): dictionary key to use as the root key + + Example: + >>> example_data = [ + {'GigabitEthernet1': {'jeff_status': 'up'}}, + {'GigabitEthernet2': {'jeff_status': 'administratively down'}}, + {'GigabitEthernet3': {'jeff_status': 'administratively down'}}, + {'GigabitEthernet4': {'jeff_status': 'administratively down'}}, + {'Loopback0': {'jeff_status': 'administratively down'}}, + {'Loopback2': 
{'jeff_status': 'administratively down'}}, + {'Port-channel1': {'jeff_status': 'down'}} + ] + >>> collapse_list_to_dict(example_data) + {'GigabitEthernet1': {'jeff_status': 'up'}, + 'GigabitEthernet2': {'jeff_status': 'administratively down'}, + 'GigabitEthernet3': {'jeff_status': 'administratively down'}, + 'GigabitEthernet4': {'jeff_status': 'administratively down'}, + 'Loopback0': {'jeff_status': 'administratively down'}, + 'Loopback2': {'jeff_status': 'administratively down'}, + 'Port-channel1': {'jeff_status': 'down'}} + """ + return {root_key: data for data in original_data for root_key, data in data.items()} + + +def merge_dicts(*dicts): + """Merges any number of dictionaries recursively, handling nested dictionaries. + + Args: + *dicts: A variable number of dictionaries to merge. + + Returns: + A new dictionary containing the merged data from all dictionaries. + """ + if not dicts: + return {} # Empty input returns an empty dictionary + merged = dicts[0].copy() + for other_dict in dicts[1:]: + if not other_dict: + continue # Skip empty dictionaries + for key, value in other_dict.items(): + if key in merged: + if isinstance(value, dict) and isinstance(merged[key], dict): + # Recursively merge nested dictionaries + merged[key] = merge_dicts(merged[key], value) + else: + # Overwrite existing values with values from subsequent dictionaries (giving priority to later ones) + merged[key] = value + # Add new key-value pairs from subsequent dictionaries + merged[key] = value + return merged diff --git a/nautobot_device_onboarding/nornir_plays/command_getter.py b/nautobot_device_onboarding/nornir_plays/command_getter.py index b5977248..4b4d8528 100755 --- a/nautobot_device_onboarding/nornir_plays/command_getter.py +++ b/nautobot_device_onboarding/nornir_plays/command_getter.py @@ -1,6 +1,5 @@ """CommandGetter.""" -# pylint: disable=relative-beyond-top-level from typing import Dict from django.conf import settings @@ -48,7 +47,7 @@ def deduplicate_command_list(data): 
def _get_commands_to_run(yaml_parsed_info): - """Load yaml file and look up all commands that need to be run.""" + """Using merged command mapper info and look up all commands that need to be run.""" all_commands = [] for _, value in yaml_parsed_info.items(): # Deduplicate commands + parser key @@ -62,7 +61,6 @@ def _get_commands_to_run(yaml_parsed_info): if isinstance(nested_command_info, dict): for command in nested_command_info["commands"]: all_commands.append(command) - print(f"all_commands: {all_commands}") return deduplicate_command_list(all_commands) @@ -74,6 +72,8 @@ def netmiko_send_commands(task: Task, command_getter_yaml_data: Dict, command_ge return Result(host=task.host, result=f"{task.host.name} has a unsupported platform set.", failed=True) task.host.data["platform_parsing_info"] = command_getter_yaml_data[task.host.platform] commands = _get_commands_to_run(command_getter_yaml_data[task.host.platform][command_getter_job]) + # Appears all commands in this for loop are already within 1 connection. 
+ # task.host.open_connection("netmiko", configuration=task.nornir.config) for command in commands: try: task.run( @@ -90,6 +90,7 @@ def netmiko_send_commands(task: Task, command_getter_yaml_data: Dict, command_ge result=f"{command['command']}: E0001 - Textfsm template issue.", failed=True, ) + # task.host.close_connection("netmiko") def _parse_credentials(credentials): @@ -109,9 +110,9 @@ def _parse_credentials(credentials): access_type=SecretsGroupAccessTypeChoices.TYPE_GENERIC, secret_type=SecretsGroupSecretTypeChoices.TYPE_SECRET, ) - except Exception: + except Exception: # pylint: disable=broad-exception-caught secret = None - except Exception: + except Exception: # pylint: disable=broad-exception-caught return (None, None, None) else: username = settings.NAPALM_USERNAME @@ -137,7 +138,6 @@ def command_getter_do(job_result, log_level, kwargs): # Initiate Nornir instance with empty inventory try: - logger = NornirLogger(job_result, log_level=0) compiled_results = {} with InitNornir( runner=NORNIR_SETTINGS.get("runner"), @@ -185,7 +185,7 @@ def command_getter_do(job_result, log_level, kwargs): command_getter_job="device_onboarding", ) except Exception as err: # pylint: disable=broad-exception-caught - logger.error(err) + logger.info("Error: %s", err) return compiled_results diff --git a/nautobot_device_onboarding/nornir_plays/empty_inventory.py b/nautobot_device_onboarding/nornir_plays/empty_inventory.py index 4da4e15e..7c098c30 100755 --- a/nautobot_device_onboarding/nornir_plays/empty_inventory.py +++ b/nautobot_device_onboarding/nornir_plays/empty_inventory.py @@ -1,11 +1,10 @@ """Empty Nornir Inventory Plugin.""" from nornir.core.inventory import Defaults, Groups, Hosts, Inventory - from nautobot_device_onboarding.nornir_plays.transform import add_platform_parsing_info -class EmptyInventory: +class EmptyInventory: # pylint: disable=too-few-public-methods """Creates an empty Nornir inventory.""" def load(self) -> Inventory: diff --git 
a/nautobot_device_onboarding/nornir_plays/formatter.py b/nautobot_device_onboarding/nornir_plays/formatter.py index 62e0cf56..96305a64 100755 --- a/nautobot_device_onboarding/nornir_plays/formatter.py +++ b/nautobot_device_onboarding/nornir_plays/formatter.py @@ -423,6 +423,6 @@ def format_results(compiled_results): format_nxos_results(data) else: data.update({"failed": True, "failed_reason": "Cannot connect to device."}) - except Exception as e: - data.update({"failed": True, "failed_reason": f"Error formatting device: {e}"}) + except Exception as err: # pylint: disable=broad-exception-caught + data.update({"failed": True, "failed_reason": f"Error formatting device: {err}"}) return compiled_results diff --git a/nautobot_device_onboarding/nornir_plays/inventory_creator.py b/nautobot_device_onboarding/nornir_plays/inventory_creator.py index 059b3782..de691b45 100755 --- a/nautobot_device_onboarding/nornir_plays/inventory_creator.py +++ b/nautobot_device_onboarding/nornir_plays/inventory_creator.py @@ -6,8 +6,6 @@ def guess_netmiko_device_type(hostname, username, password, port): """Guess the device type of host, based on Netmiko.""" - guessed_device_type = None - netmiko_optional_args = {"port": port} remote_device = { @@ -22,9 +20,9 @@ def guess_netmiko_device_type(hostname, username, password, port): guesser = SSHDetect(**remote_device) guessed_device_type = guesser.autodetect() - except Exception as err: - print(err) - print(f"{hostname} - guessed platform: {guessed_device_type}") + except Exception: # pylint: disable=broad-exception-caught + guessed_device_type = None + # Additional checking is done later in the process. We shouldn't reraise an error as it causes the job to fail. 
return guessed_device_type diff --git a/nautobot_device_onboarding/nornir_plays/schemas.py b/nautobot_device_onboarding/nornir_plays/schemas.py new file mode 100755 index 00000000..17795bef --- /dev/null +++ b/nautobot_device_onboarding/nornir_plays/schemas.py @@ -0,0 +1,105 @@ +"""General Schemas.""" + + +def device_onboarding_schema(json_schema=True): + """Schema for SSoT Network Device Onboarding.""" + if json_schema: + return { + "title": "Device Onboarding", + "description": "Schema for SSoT Network Device Onboarding", + "type": "object", + "required": ["hostname", "serial", "device_type", "mgmt_interface", "platform", "network_driver"], + "properties": { + "hostname": {"type": "string", "description": "Hostname of the network device"}, + "serial": {"type": "string", "description": "Serial number of the network device"}, + "device_type": {"type": "string", "description": "Type of the network device"}, + "mgmt_interface": {"type": "string", "description": "Management interface of the network device"}, + "mask_length": { + "type": "integer", + "default": 31, + "description": "Subnet mask length for the management interface (default: 31)", + }, + "platform": {"type": "string", "description": "Platform of the network device"}, + "manufacturer": { + "type": "string", + "default": "PLACEHOLDER", + "description": "Manufacturer of the network device (default: PLACEHOLDER)", + }, + "network_driver": {"type": "string", "description": "Network driver used for the device"}, + }, + } + return { + "hostname": "", + "serial": "", + "device_type": "", + "mgmt_interface": "", + "mask_length": 31, + "platform": "", + "manufacturer": "PLACEHOLDER", + "network_driver": "", + } + + +def network_importer_schema(json_schema=True): + """Schema for SSoT Network Network Importer.""" + if json_schema: + return { + "title": "Network Importer", + "description": "Schema for SSoT Network Network Importer", + "type": "object", + "required": ["type", "ip_addresses", "mac_address", 
"link_status", "802.1Q_mode"], + "properties": { + "type": {"type": "string", "description": "Type of the network interface"}, + "ip_addresses": { + "type": "array", + "minItems": 1, + "items": { + "type": "object", + "required": ["ip_address", "prefix_length"], + "properties": { + "ip_address": {"type": "string", "description": "IP address of the interface"}, + "prefix_length": {"type": "integer", "description": "Prefix length of the IP address"}, + }, + }, + "description": "List of IP addresses associated with the interface", + }, + "mac_address": {"type": "string", "description": "MAC address of the interface"}, + "mtu": {"type": "string", "description": "MTU of the interface"}, + "description": {"type": "string", "description": "Description of the interface"}, + "link_status": {"type": "boolean", "description": "Link status of the interface (up or down)"}, + "802.1Q_mode": {"type": "string", "description": "802.1Q mode of the interface (access, trunk, etc.)"}, + "lag": { + "type": "string", + "description": "LAG (Link Aggregation Group) the interface belongs to (optional)", + }, + "untagged_vlan": {"type": "object", "description": "Untagged VLAN information (optional)"}, + "tagged_vlans": { + "type": "array", + "items": { + "type": "object", + "required": ["name", "id"], + "properties": { + "name": {"type": "string", "description": "Name of the tagged VLAN"}, + "id": {"type": "string", "description": "ID of the tagged VLAN"}, + }, + }, + "description": "List of tagged VLANs associated with the interface (optional)", + }, + }, + } + return { + "type": "str", + "ip_addresses": [ + {"ip_address": "str", "prefix_length": "int"}, + {"ip_address": "str", "prefix_length": "int"}, + ], + "mac_address": "str", + "mtu": "str", + "description": "str", + "link_status": "bool", + "802.1Q_mode": "str", + "lag": "str", + "untagged_vlan": "dict", + "tagged_vlans": [{"name": "str", "id": "str"}, {"name": "str", "id": "str"}], + "vrf": {"name": "str", "rd": "str"}, + } diff 
--git a/nautobot_device_onboarding/nornir_plays/transform.py b/nautobot_device_onboarding/nornir_plays/transform.py index ad1d9ef1..3bbfef8d 100755 --- a/nautobot_device_onboarding/nornir_plays/transform.py +++ b/nautobot_device_onboarding/nornir_plays/transform.py @@ -1,7 +1,6 @@ """Adds command mapper, platform parsing info.""" import os - import yaml from nautobot.extras.models import GitRepository From 68f629e0143863e070778d5a95a1cad7a32057ca Mon Sep 17 00:00:00 2001 From: Susan Hooks Date: Fri, 12 Apr 2024 20:39:59 +0000 Subject: [PATCH 208/225] update for handling --- .../nornir_plays/formatter.py | 479 ++++++++++-------- 1 file changed, 270 insertions(+), 209 deletions(-) diff --git a/nautobot_device_onboarding/nornir_plays/formatter.py b/nautobot_device_onboarding/nornir_plays/formatter.py index bfd1af9c..c2bf6092 100755 --- a/nautobot_device_onboarding/nornir_plays/formatter.py +++ b/nautobot_device_onboarding/nornir_plays/formatter.py @@ -126,73 +126,77 @@ def ensure_list(data): def format_ios_results(device): """Format the results of the show commands for IOS devices.""" - try: - serial = device.get("serial") - mtus = device.get("mtu", []) - types = device.get("type", []) - ips = device.get("ip_addresses", []) - prefixes = device.get("prefix_length", []) - macs = device.get("mac_address", []) - descriptions = device.get("description", []) - link_statuses = device.get("link_status", []) - vrfs = device.get("vrfs", []) - vlans = device.get("vlans", []) - interface_vlans = device.get("interface_vlans", []) - - # Some data may come across as a dict, needs to be list. Probably should do this elsewhere. 
- mtu_list = ensure_list(mtus) - type_list = ensure_list(types) - ip_list = ensure_list(ips) - prefix_list = ensure_list(prefixes) - mac_list = ensure_list(macs) - description_list = ensure_list(descriptions) - link_status_list = ensure_list(link_statuses) - vlan_list = ensure_list(vlans) - interface_vlan_list = ensure_list(interface_vlans) - - if vrfs is None: - vrf_list = [] - else: - vrf_list = ensure_list(vrfs) - - interface_dict = {} - for item in mtu_list: - interface_dict.setdefault(item["interface"], {})["mtu"] = item["mtu"] - for item in type_list: - interface_type = map_interface_type(item["type"]) - interface_dict.setdefault(item["interface"], {})["type"] = interface_type - for item in ip_list: - interface_dict.setdefault(item["interface"], {})["ip_addresses"] = {"ip_address": item["ip_address"]} - for item in prefix_list: - interface_dict.setdefault(item["interface"], {}).setdefault("ip_addresses", {})["prefix_length"] = item[ - "prefix_length" - ] - for item in mac_list: - interface_dict.setdefault(item["interface"], {})["mac_address"] = item["mac_address"] - for item in description_list: - interface_dict.setdefault(item["interface"], {})["description"] = item["description"] - for item in link_status_list: - interface_dict.setdefault(item["interface"], {})["link_status"] = ( - True if item["link_status"] == "up" else False - ) - - # Add default values to interfaces - default_values = { - "lag": "", - "untagged_vlan": {}, - "tagged_vlans": [], - "vrf": {}, - "802.1Q_mode": "", - } - - vlan_map = {vlan["vlan_id"]: vlan["vlan_name"] for vlan in vlan_list} - - for item in interface_vlan_list: + serial = device.get("serial") + mtus = device.get("mtu", []) + types = device.get("type", []) + ips = device.get("ip_addresses", []) + prefixes = device.get("prefix_length", []) + macs = device.get("mac_address", []) + descriptions = device.get("description", []) + link_statuses = device.get("link_status", []) + vrfs = device.get("vrfs", []) + vlans = 
device.get("vlans", []) + interface_vlans = device.get("interface_vlans", []) + + # Some data may come across as a dict, needs to be list. Probably should do this elsewhere. + mtu_list = ensure_list(mtus) + type_list = ensure_list(types) + ip_list = ensure_list(ips) + prefix_list = ensure_list(prefixes) + mac_list = ensure_list(macs) + description_list = ensure_list(descriptions) + link_status_list = ensure_list(link_statuses) + vlan_list = ensure_list(vlans) + interface_vlan_list = ensure_list(interface_vlans) + + if vrfs is None: + vrf_list = [] + else: + vrf_list = ensure_list(vrfs) + + interface_dict = {} + for item in mtu_list: + interface_dict.setdefault(item["interface"], {})["mtu"] = item["mtu"] + for item in type_list: + interface_type = map_interface_type(item["type"]) + interface_dict.setdefault(item["interface"], {})["type"] = interface_type + + for item in ip_list: + interface_dict.setdefault(item["interface"], {})["ip_addresses"] = {"ip_address": item["ip_address"]} + + for item in prefix_list: + interface_dict.setdefault(item["interface"], {}).setdefault("ip_addresses", {})["prefix_length"] = item[ + "prefix_length" + ] + for item in mac_list: + interface_dict.setdefault(item["interface"], {})["mac_address"] = ( + item["mac_address"] if item["mac_address"] else "" + ) + for item in description_list: + interface_dict.setdefault(item["interface"], {})["description"] = ( + item["description"] if item["description"] else "" + ) + for item in link_status_list: + interface_dict.setdefault(item["interface"], {})["link_status"] = True if item["link_status"] == "up" else False + + # Add default values to interfaces + default_values = { + "lag": "", + "untagged_vlan": {}, + "tagged_vlans": [], + "vrf": {}, + "802.1Q_mode": "", + } + + vlan_map = {vlan["vlan_id"]: vlan["vlan_name"] for vlan in vlan_list} + for item in interface_vlan_list: + try: + if not item["interface"]: + continue canonical_name = canonical_interface_name(item["interface"]) 
interface_dict.setdefault(canonical_name, {}) mode = item["admin_mode"] trunking_vlans = item["trunking_vlans"] - if mode == "trunk" and trunking_vlans == ["ALL"]: interface_dict[canonical_name]["802.1Q_mode"] = "tagged-all" interface_dict[canonical_name]["untagged_vlan"] = { @@ -205,10 +209,11 @@ def format_ios_results(device): "name": vlan_map[item["access_vlan"]], "id": item["access_vlan"], } - elif mode == "trunk" and trunking_vlans != "['ALL']": + elif mode == "trunk" and trunking_vlans != ["ALL"]: interface_dict[canonical_name]["802.1Q_mode"] = "tagged" tagged_vlans = [] for vlan_id in trunking_vlans[0].split(","): + print(f"vlan_id: {vlan_id}") if "-" in vlan_id: start, end = map(int, vlan_id.split("-")) for id in range(start, end + 1): @@ -216,10 +221,12 @@ def format_ios_results(device): print(f"Error: VLAN {id} found on interface, but not found in vlan db.") else: tagged_vlans.append({"name": vlan_map[str(id)], "id": str(id)}) - if vlan_id not in vlan_map: - print(f"Error: VLAN {vlan_id} found on interface, but not found in vlan db.") else: - tagged_vlans.append({"name": vlan_map[vlan_id], "id": vlan_id}) + if vlan_id not in vlan_map: + print(f"Error: VLAN {vlan_id} found on interface, but not found in vlan db.") + else: + tagged_vlans.append({"name": vlan_map[vlan_id], "id": vlan_id}) + interface_dict[canonical_name]["tagged_vlans"] = tagged_vlans interface_dict[canonical_name]["untagged_vlan"] = { "name": vlan_map[item["native_vlan"]], @@ -227,23 +234,27 @@ def format_ios_results(device): } else: interface_dict[canonical_name]["802.1Q_mode"] = "" - - for interface in interface_dict.values(): - for key, default in default_values.items(): - interface.setdefault(key, default) - - for interface, data in interface_dict.items(): - ip_addresses = data.get("ip_addresses", {}) - if ip_addresses: - data["ip_addresses"] = [ip_addresses] - - # Convert interface names to canonical form - interface_list = [] - for interface_name, interface_info in 
interface_dict.items(): - interface_list.append({canonical_interface_name(interface_name): interface_info}) - - # Change VRF interface names to canonical and create dict of interfaces - for vrf in vrf_list: + except KeyError as e: + print(f"Error: VLAN not found on interface for interface {canonical_name} {e}") + continue + + for interface in interface_dict.values(): + for key, default in default_values.items(): + interface.setdefault(key, default) + + for interface, data in interface_dict.items(): + ip_addresses = data.get("ip_addresses", {}) + if ip_addresses: + data["ip_addresses"] = [ip_addresses] + + # Convert interface names to canonical form + interface_list = [] + for interface_name, interface_info in interface_dict.items(): + interface_list.append({canonical_interface_name(interface_name): interface_info}) + + # Change VRF interface names to canonical and create dict of interfaces + for vrf in vrf_list: + try: for interface in vrf["interfaces"]: canonical_name = canonical_interface_name(interface) if canonical_name.startswith("VLAN"): @@ -259,120 +270,169 @@ def format_ios_results(device): "name": vrf["name"], "rd": vrf["default_rd"], } - print(f"interface_dict: {interface_dict}") - device["interfaces"] = interface_list - device["serial"] = serial - - try: - del device["mtu"] - del device["type"] - del device["ip_addresses"] - del device["prefix_length"] - del device["mac_address"] - del device["description"] - del device["link_status"] - del device["vrfs"] - del device["mode"] - except KeyError: - device = {"failed": True, "failed_reason": f"Formatting error 2 for device {device}"} - except Exception as e: - device = {"failed": True, "failed_reason": f"Formatting error 1 {e} for device {device}"} + print(f"Error: VRF configuration on interface {interface} not as expected.") + continue + + device["interfaces"] = interface_list + device["serial"] = serial + + del device["mtu"] + del device["type"] + del device["ip_addresses"] + del device["prefix_length"] + 
del device["mac_address"] + del device["description"] + del device["link_status"] + del device["vrfs"] + del device["vlans"] + del device["interface_vlans"] return device def format_nxos_results(device): """Format the results of the show commands for NX-OS devices.""" - try: - serial = device.get("serial") - mtus = device.get("mtu", []) - types = device.get("type", []) - ips = device.get("ip_addresses", []) - prefixes = device.get("prefix_length", []) - macs = device.get("mac_address", []) - descriptions = device.get("description", []) - link_statuses = device.get("link_status", []) - modes = device.get("mode", []) - vrfs_rd = device.get("vrf_rds", []) - vrfs_interfaces = device.get("vrf_interfaces", []) - vlans = device.get("vlans", []) - interface_vlans = device.get("interface_vlans", []) - - mtu_list = ensure_list(mtus) - type_list = ensure_list(types) - ip_list = ensure_list(ips) - prefix_list = ensure_list(prefixes) - mac_list = ensure_list(macs) - description_list = ensure_list(descriptions) - link_status_list = ensure_list(link_statuses) - mode_list = ensure_list(modes) - vlan_list = ensure_list(vlans) - interface_vlan_list = ensure_list(interface_vlans) - - if vrfs_rd is None: - vrfs_rds = [] - else: - vrfs_rds = ensure_list(vrfs_rd) - if vrfs_interfaces is None: - vrfs_interfaces = [] - else: - vrfs_interfaces = ensure_list(vrfs_interfaces) - - interface_dict = {} - for item in mtu_list: - interface_dict.setdefault(item["interface"], {})["mtu"] = item["mtu"] - for item in type_list: - interface_type = map_interface_type(item["type"]) - interface_dict.setdefault(item["interface"], {})["type"] = interface_type - for item in ip_list: - interface_dict.setdefault(item["interface"], {})["ip_addresses"] = {"ip_address": item["ip_address"]} - for item in prefix_list: - interface_dict.setdefault(item["interface"], {}).setdefault("ip_addresses", {})["prefix_length"] = item[ - "prefix_length" - ] - for item in mac_list: - interface_dict.setdefault(item["interface"], 
{})["mac_address"] = item["mac_address"] - for item in description_list: - interface_dict.setdefault(item["interface"], {})["description"] = item["description"] - for item in link_status_list: - interface_dict.setdefault(item["interface"], {})["link_status"] = ( - True if item["link_status"] == "up" else False - ) - for item in mode_list: - interface_dict.setdefault(item["interface"], {})["802.1Q_mode"] = ( - "access" if item["mode"] == "access" else "tagged" if item["mode"] == "trunk" else "" - ) - - default_values = {"lag": "", "untagged_vlan": {}, "tagged_vlans": [], "vrf": {}} - - vlan_map = {vlan["vlan_id"]: vlan["vlan_name"] for vlan in vlan_list} - - for interface in interface_dict.values(): - for key, default in default_values.items(): - interface.setdefault(key, default) - - for interface, data in interface_dict.items(): - ip_addresses = data.get("ip_addresses", {}) - if ip_addresses: - data["ip_addresses"] = [ip_addresses] - - # Convert interface names to canonical form - interface_list = [] - for interface_name, interface_info in interface_dict.items(): - interface_list.append({canonical_interface_name(interface_name): interface_info}) - - # Populate vrf_dict from commands and add to interface_dict - vrf_dict = {vrf["id"]: vrf for vrf in vrfs_rds} - - for interface in vrfs_interfaces: - vrf_id = interface["id"] - if "interfaces" not in vrf_dict[vrf_id]: - vrf_dict[vrf_id]["interfaces"] = [] - vrf_dict[vrf_id]["interfaces"].append(interface["interface"]) - - vrf_list = list(vrf_dict.values()) - for vrf in vrf_list: + + serial = device.get("serial") + mtus = device.get("mtu", []) + types = device.get("type", []) + ips = device.get("ip_addresses", []) + prefixes = device.get("prefix_length", []) + macs = device.get("mac_address", []) + descriptions = device.get("description", []) + link_statuses = device.get("link_status", []) + # modes = device.get("mode", []) + vrfs_rd = device.get("vrf_rds", []) + vrfs_interfaces = device.get("vrf_interfaces", []) + 
vlans = device.get("vlans", []) + interface_vlans = device.get("interface_vlans", []) + + mtu_list = ensure_list(mtus) + type_list = ensure_list(types) + ip_list = ensure_list(ips) + prefix_list = ensure_list(prefixes) + mac_list = ensure_list(macs) + description_list = ensure_list(descriptions) + link_status_list = ensure_list(link_statuses) + # mode_list = ensure_list(modes) + vlan_list = ensure_list(vlans) + interface_vlan_list = ensure_list(interface_vlans) + + if vrfs_rd is None: + vrfs_rds = [] + else: + vrfs_rds = ensure_list(vrfs_rd) + if vrfs_interfaces is None: + vrfs_interfaces = [] + else: + vrfs_interfaces = ensure_list(vrfs_interfaces) + + interface_dict = {} + for item in mtu_list: + interface_dict.setdefault(item["interface"], {})["mtu"] = item["mtu"] + for item in type_list: + interface_type = map_interface_type(item["type"]) + interface_dict.setdefault(item["interface"], {})["type"] = interface_type + for item in ip_list: + interface_dict.setdefault(item["interface"], {})["ip_addresses"] = {"ip_address": item["ip_address"]} + for item in prefix_list: + interface_dict.setdefault(item["interface"], {}).setdefault("ip_addresses", {})["prefix_length"] = item[ + "prefix_length" + ] + for item in mac_list: + interface_dict.setdefault(item["interface"], {})["mac_address"] = item["mac_address"] + for item in description_list: + interface_dict.setdefault(item["interface"], {})["description"] = item["description"] + for item in link_status_list: + interface_dict.setdefault(item["interface"], {})["link_status"] = True if item["link_status"] == "up" else False + # for item in mode_list: + # interface_dict.setdefault(item["interface"], {})["802.1Q_mode"] = ( + # "access" if item["mode"] == "access" else "tagged" if item["mode"] == "trunk" else "" + # ) + + default_values = {"lag": "", "untagged_vlan": {}, "tagged_vlans": [], "vrf": {}, "802.1Q_mode": ""} + + vlan_map = {vlan["vlan_id"]: vlan["vlan_name"] for vlan in vlan_list} + print(f"vlan_map: {vlan_map}, 
interface_vlan_list: {interface_vlan_list}") + # for item in interface_vlan_list: + # try: + # if not item["interface"]: + # continue + # canonical_name = canonical_interface_name(item["interface"]) + # interface_dict.setdefault(canonical_name, {}) + # mode = item["mode"] + # trunking_vlans = item["trunking_vlans"] + # if mode == "trunk" and trunking_vlans == "1-4094": + # interface_dict[canonical_name]["802.1Q_mode"] = "tagged-all" + # interface_dict[canonical_name]["untagged_vlan"] = { + # "name": vlan_map[item["native_vlan"]], + # "id": item["native_vlan"], + # } + # elif mode == "access": + # interface_dict[canonical_name]["802.1Q_mode"] = "access" + # interface_dict[canonical_name]["untagged_vlan"] = { + # "name": item["access_vlan_name"], + # "id": item["access_vlan"], + # } + # elif mode == "trunk" and trunking_vlans != "1-4094": + # print(f"trunking_vlans: {trunking_vlans}") + # pass + # interface_dict[canonical_name]["802.1Q_mode"] = "tagged" + # tagged_vlans = [] + # trunking_vlans = trunking_vlans.split(",") + # for vlan_id in trunking_vlans: + # print(f"vlan_id: {vlan_id}") + # if "-" in vlan_id: + # start, end = map(int, vlan_id.split("-")) + # for id in range(start, end + 1): + # if str(id) not in vlan_map: + # print(f"Error: VLAN {id} found on interface, but not found in vlan db.") + # else: + # tagged_vlans.append({"name": vlan_map[str(id)], "id": str(id)}) + # else: + # if vlan_id not in vlan_map: + # print(f"Error: VLAN {vlan_id} found on interface, but not found in vlan db.") + # else: + # tagged_vlans.append({"name": vlan_map[vlan_id], "id": vlan_id}) + + # interface_dict[canonical_name]["tagged_vlans"] = tagged_vlans + # interface_dict[canonical_name]["untagged_vlan"] = { + # "name": vlan_map[item["native_vlan"]], + # "id": item["native_vlan"], + # } + # else: + # interface_dict[canonical_name]["802.1Q_mode"] = "" + # except KeyError as e: + # print(f"Error: VLAN not found on interface for interface {canonical_name} {e}") + # continue + + for 
interface in interface_dict.values(): + for key, default in default_values.items(): + interface.setdefault(key, default) + + for interface, data in interface_dict.items(): + ip_addresses = data.get("ip_addresses", {}) + if ip_addresses: + data["ip_addresses"] = [ip_addresses] + + # Convert interface names to canonical form + interface_list = [] + for interface_name, interface_info in interface_dict.items(): + interface_list.append({canonical_interface_name(interface_name): interface_info}) + + # Populate vrf_dict from commands and add to interface_dict + vrf_dict = {vrf["id"]: vrf for vrf in vrfs_rds} + + for interface in vrfs_interfaces: + vrf_id = interface["id"] + if "interfaces" not in vrf_dict[vrf_id]: + vrf_dict[vrf_id]["interfaces"] = [] + vrf_dict[vrf_id]["interfaces"].append(interface["interface"]) + + vrf_list = list(vrf_dict.values()) + for vrf in vrf_list: + try: if "interfaces" in vrf: for interface in vrf["interfaces"]: canonical_name = canonical_interface_name(interface) @@ -386,25 +446,26 @@ def format_nxos_results(device): } else: interface_dict[canonical_name]["vrf"] = {"name": vrf["name"], "rd": vrf["default_rd"]} - - device["interfaces"] = interface_list - device["serial"] = serial - try: - del device["mtu"] - del device["type"] - del device["ip_addresses"] - del device["prefix_length"] - del device["mac_address"] - del device["description"] - del device["link_status"] - del device["mode"] - del device["vrf_rds"] - del device["vrf_interfaces"] - except KeyError: - device = {"failed": True, "failed_reason": f"Formatting error for device {device}"} - except Exception: - device = {"failed": True, "failed_reason": f"Formatting error for device {device}"} + print(f"Error: VRF configuration on interface {interface} not as expected.") + continue + + device["interfaces"] = interface_list + device["serial"] = serial + + del device["mtu"] + del device["type"] + del device["ip_addresses"] + del device["prefix_length"] + del device["mac_address"] + del 
device["description"] + del device["link_status"] + # del device["mode"] + del device["vrf_rds"] + del device["vrf_interfaces"] + del device["vlans"] + del device["interface_vlans"] + return device From 647b8a7aff2f71997695a36ecd3d37d85e91aea7 Mon Sep 17 00:00:00 2001 From: Susan Hooks Date: Fri, 12 Apr 2024 20:40:34 +0000 Subject: [PATCH 209/225] update --- nautobot_device_onboarding/nornir_plays/formatter.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nautobot_device_onboarding/nornir_plays/formatter.py b/nautobot_device_onboarding/nornir_plays/formatter.py index c2bf6092..dd893708 100755 --- a/nautobot_device_onboarding/nornir_plays/formatter.py +++ b/nautobot_device_onboarding/nornir_plays/formatter.py @@ -354,7 +354,7 @@ def format_nxos_results(device): default_values = {"lag": "", "untagged_vlan": {}, "tagged_vlans": [], "vrf": {}, "802.1Q_mode": ""} vlan_map = {vlan["vlan_id"]: vlan["vlan_name"] for vlan in vlan_list} - print(f"vlan_map: {vlan_map}, interface_vlan_list: {interface_vlan_list}") + # print(f"vlan_map: {vlan_map}, interface_vlan_list: {interface_vlan_list}") # for item in interface_vlan_list: # try: # if not item["interface"]: From 2fedc655fee0b9e844aa18d1bdf287fc116d1dc4 Mon Sep 17 00:00:00 2001 From: Susan Hooks Date: Fri, 12 Apr 2024 20:40:59 +0000 Subject: [PATCH 210/225] vlans add --- .../command_mappers/cisco_nxos.yml | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/nautobot_device_onboarding/command_mappers/cisco_nxos.yml b/nautobot_device_onboarding/command_mappers/cisco_nxos.yml index 359115d2..5cebf7d1 100755 --- a/nautobot_device_onboarding/command_mappers/cisco_nxos.yml +++ b/nautobot_device_onboarding/command_mappers/cisco_nxos.yml @@ -81,3 +81,13 @@ network_importer: - command: "show vrf detail" use_textfsm: true jpath: "[*].{id: id, name: name, default_rd: default_rd}" + vlans: + commands: + - command: "show vlan" + use_textfsm: true + jpath: "[*].{interfaces: interfaces, vlan_name: vlan_name, 
vlan_id: vlan_id, status: status}" + interface_vlans: + commands: + - command: "show interface switchport" + use_textfsm: true + jpath: "[*].{interface: interface, mode:mode, access_vlan: access_vlan, access_vlan_name: access_vlan_name, native_vlan: native_vlan, trunking_vlans: trunking_vlans, voice_vlan: voice_vlan}" From e7791b9a4e9fd3a05686afa6724028092162ff57 Mon Sep 17 00:00:00 2001 From: Jeff Kala Date: Fri, 12 Apr 2024 17:31:12 -0500 Subject: [PATCH 211/225] first pass at adding docs for new ssot features --- docs/dev/extending.md | 13 +++ docs/dev/onboarding_extensions.md | 3 + docs/user/app_detailed_design.md | 33 +++++++ docs/user/app_getting_started.md | 11 ++- docs/user/app_overview.md | 55 ++++++++++- docs/user/app_use_cases.md | 117 +++++++++++------------ docs/user/app_use_cases_legacy.md | 145 +++++++++++++++++++++++++++++ docs/user/external_interactions.md | 3 +- docs/user/faq.md | 13 +-- 9 files changed, 321 insertions(+), 72 deletions(-) create mode 100755 docs/user/app_detailed_design.md mode change 100644 => 100755 docs/user/app_use_cases.md create mode 100644 docs/user/app_use_cases_legacy.md diff --git a/docs/dev/extending.md b/docs/dev/extending.md index d781f0bc..06b8e7c6 100644 --- a/docs/dev/extending.md +++ b/docs/dev/extending.md @@ -7,3 +7,16 @@ Extending the application is welcome, however it is best to open an issue first, This plugin provides methods to customize onboarding behavior. By creating onboarding extensions, it is possible to onboard switch stacks, HA pairs and perform other customizations. Please see the dedicated FAQ for [device onboarding extensions](onboarding_extensions.md). + +!!! warn + This is the legacy onboarding extensions. The next section covers how to extend the new framework. + +## Extending SSoT jobs (Sync Devices, and Sync Network Data) + +Extending the platform support for the SSoT specific jobs should be accomplished with adding a yaml file that defines commands, jdiff jmespaths, and post_processors. 
A PR into this library is welcomed, but this app exposes the Nautobot core datasource capabilities to be able to load in overrides from a Git repository. + +### Adding Platform/OS Support + +New platform support should be simplified in this framework by providing a YAML file. + +TODO: diff --git a/docs/dev/onboarding_extensions.md b/docs/dev/onboarding_extensions.md index 0e1ab603..9548749f 100644 --- a/docs/dev/onboarding_extensions.md +++ b/docs/dev/onboarding_extensions.md @@ -1,5 +1,8 @@ # Onboarding Extensions +!!! warn + These are the legacy onboarding extensions. + ## What are onboarding extensions? Onboarding Extensions are Python modules that are dynamically loaded and executed as a part of device onboarding. diff --git a/docs/user/app_detailed_design.md b/docs/user/app_detailed_design.md new file mode 100755 index 00000000..3c143d7e --- /dev/null +++ b/docs/user/app_detailed_design.md @@ -0,0 +1,33 @@ +# Network SSoT Detailed Design + +This page will describe the newer SSoT jobs that this App exposes and how they work. + +## Frameworks in Use + +- [Nautobot SSoT](https://docs.nautobot.com/projects/ssot/en/latest/) - Utilizing the existing Nautobot SSoT framework allows a common pattern to be re-used and offers a path forward to add additional support and features. +- [Nautobot App Nornir](https://docs.nautobot.com/projects/plugin-nornir/en/latest/) - Utilized for Nornir Inventory plugins for Nautobot (specifically for the Sync Network Data Job). +- [Nornir Netmiko](https://github.com/ktbyers/nornir_netmiko) - Used to execute commands and return results. +- [Jdiff](https://jdiff.readthedocs.io/en/latest/usage/#extract_data_from_json) - Used to simplify parsing required data fields out of command outputs returned from command parser libraries like textFSM. Specifically the `extract_data_from_json` method. +- Parsers - Initially NTC Templates via textFSM; support for PyATS, TTP, etc. is expected in the future.
+ +## How the SSoT **Sync Devices** Job Works + +1. The job is executed with inputs selected. + - List of comma separated IP/DNS names is provided. + - Other required fields are selected in the job inputs form. + +2. The SSoT framework loads the Nautobot adapter information. +3. The SSoT framework's network adapter `load()` method calls nornir functionality. + - The job inputs data from the job inputs form are passed to the InitNornir initializer; because we only have basic information, a custom `EmptyInventory` Nornir inventory plugin is packaged with the App. This gets initialized in the `InitNornir` function, but actually initializes a true inventory that is empty. + - Since `Platform` information may need to be auto-detected before adding a Nornir `Host` object to the inventory, a `create_inventory` function is executed that uses the SSH-Autodetect via Netmiko to try to determine the platform so it can be injected into the `Host` object. + - Finally, all the platform specific commands to run, along with all the jpath, post_processor information loaded from the platform specific YAML files must be injected into the Nornir data object to be accessible later in the extract, transform functions. +4. Within the context of a Nornir `with_processor` context manager, call the netmiko_send command Nornir play. + - Access the loaded platform specific YAML data and deduplicate commands to avoid running the same command multiple times. E.g. Multiple required data attributes come from the same show command. +5. Utilize the native Nornir Processor to overload functionality on `subtask_instance_completed()` to run command outputs through extract and transformation functions. + - This essentially is our "ET" portion of an "ETL" process. + - Next, the JSON result from the show command after the parser executes (E.g. textfsm), gets run through the jdiff function `extract_data_from_json()` with the data and the `jpath` from the YAML file definition.
+ - Finally, an optional `post_processor` jinja2-capable execution can further transform the data for that command before passing it to finish the SSoT synchronization. + +## How the SSoT **Sync Network Data** Job Works + + diff --git a/docs/user/app_getting_started.md b/docs/user/app_getting_started.md index 5c3ba20f..21679595 100644 --- a/docs/user/app_getting_started.md +++ b/docs/user/app_getting_started.md @@ -8,6 +8,8 @@ To install the App, please follow the instructions detailed in the [Installation ## First steps with the App +This App exposes a legacy device onboarding job, as well as two new SSoT based jobs that are considered the future of the App. + ### Prerequisites You will need: @@ -21,9 +23,16 @@ The device must be reachable from the Nautobot and Nautobot worker instances (us Locations are the only other Nautobot prerequisite for the plugin to onboard a device. +!!! info + There are a few other requirements for the new SSoT based jobs, but sensible defaults are also supported. + ### Onboarding a Device -Navigate to the Device Onboarding Job: Jobs > Perform Device Onboarding. +Navigate to the Device Onboarding Job: Jobs > Perform Device Onboarding (Legacy). + +or + +Navigate to the SSoT dashboard and run `Sync Devices` to get basic device information onboarding, followed by `Sync Network Data` to add additional details from the network to these devices. E.g. Interfaces, IPs, VRFs, VLANs. ## What are the next steps? diff --git a/docs/user/app_overview.md b/docs/user/app_overview.md index 255849ac..9d3cde24 100644 --- a/docs/user/app_overview.md +++ b/docs/user/app_overview.md @@ -9,6 +9,11 @@ This [Nautobot](https://github.com/nautobot/nautobot) App allows to easily onboa ## Description/Overview +### Legacy Implementation + +!!!
info + The legacy job and extensions pattern will remain a part of this App for the near future; this will allow custom extensions to continue working without causing issues for users that have taken the time to understand the legacy framework. The newer SSoT implementation will be discussed in the next section. + The `nautobot-device-onboarding` app uses the [netmiko](https://github.com/ktbyers/netmiko) and [NAPALM](https://napalm.readthedocs.io/en/latest/) libraries to simplify the onboarding process of a new device into Nautobot down to, in many cases, an *IP Address* and a *Location*. In some cases, the user may also have to specify a specific *Device Platform* and *Device Port*. Regardless, the Onboarding App greatly simplifies the onboarding process by allowing the user to specify a small amount of info and having the app populate a much larger amount of device data in Nautobot. @@ -38,6 +43,53 @@ For example, getting the Management IP and Platform data into Nautobot allows a One example of a solution that can retrieve that additional device data and import it into Nautobot is the [Network Importer](https://github.com/networktocode/network-importer). Other options would include an Ansible playbook or a Python script. +### New SSoT Implementation + +The new implementation of device onboarding in this app is utilizing the SSoT framework; the main reasons for providing the new mechanisms were to solve the following challenges: + +- Make it easier to extend and add additional vendor/OS support. +- Collapse this app and the external [network-importer]() into the same Nautobot app for simplified device onboarding with more object support. + - Remove the Batfish dependency. +- Re-use backend plugins and libraries such as `nautobot-app-nornir` to provide a similar feel to other plugins like `nautobot-app-golden-config`. +- Utilize the SSoT framework and the new `contrib` functionality to speed up development of new features.
+- By collapsing the onboarding and network import functionality into a single app, reduce the number of tools needed to get device data into Nautobot. + +Expose two new SSoT based Nautobot jobs to perform the syncing of data. + +1. `Sync Device SSoT Job` - Takes minimum inputs nearly identical to the legacy job (IP, Location, SecretGroup), and creates a device with bare minimum information to be able to manage a device. This job syncs data from the network itself and creates a device with the following attributes. + - Hostname + - Serial Number + - Device Type + - Platform + - Management Interface + - Management IP address (creates a prefix if one doesn't exist for the IP discovered.) + +2. `Sync Network Data SSoT Job` - From a provided list of existing Nautobot device objects, sync in additional metadata from a network device to enhance the available data from the network in Nautobot. + - All interfaces on the device plus the attributes below: + - Interface Name + - MTU + - Description + - Interface type (limited support. Default: 'Other') + - Mac Address + - Link Status + - Interface Mode + - VLANs + - Vlans + - Untagged and Tagged + - VRFs + - VRF Names + - Route Distinguishers (RD) + - Cabling (Coming soon...) + +!!! info + For more information look at the provided jsonschema definitions for each of the jobs. + +Additional References: + +- For more information see [App Use Cases](./app_use_cases.md). +- To understand the lower level details of how the Network-SSoT framework is designed see [Network-SSoT Design](./app_detailed_design.md). +- To learn how to add additional platform/OS support visit [Extending](./external_interactions.md). + ## Audience (User Personas) - Who should use this App? The Onboarding App is meant for new Nautobot users who want to start importing their devices directly rather than from another, existing, source. Even with other sources for device information, they may not include everything that is necessary.
@@ -48,7 +100,7 @@ Existing Nautobot users may want to incorporate the Onboarding App as part of on ### Authors -@mzb and @dgarros +@mzb and @dgarros and many other great contributors! ### Maintainers @@ -61,3 +113,4 @@ Existing Nautobot users may want to incorporate the Onboarding App as part of on - Secrets & SecretsGroup - Jobs +- Datasources diff --git a/docs/user/app_use_cases.md b/docs/user/app_use_cases.md old mode 100644 new mode 100755 index a918282d..e7c1cc05 --- a/docs/user/app_use_cases.md +++ b/docs/user/app_use_cases.md @@ -1,37 +1,39 @@ # Using the App -This document describes common use-cases and scenarios for this App. +This document describes common use-cases and scenarios for this App utilizing the exposed SSoT jobs. ## General Usage +This App can be used in three general ways. + +1. Onboard a device with basic information. (Name, Serial, Device Type, Management IP + Interface) +2. Take existing devices and enhace the data for each device by syncing in more metadata. (Interface, VLANs, VRFs etc.) +3. Both 1 and 2 in conjunction with each other. + ### Preparation To properly onboard a device, a user needs to provide, at a minimum: 1. The Device's Location 2. The Device's primary IP address or DNS Name +3. Selecting other attributes metadata needed. (Default statuses, roles, etc.) !!! note - For DNS Name Resolution to work, the instance of Nautobot must be able to resolve the name of the device to IP address. - -If other attributes (`Platform`, `Device Type`, `Role`) are provided in the onboarding job, the app will use provided value for the onboarded device. + For DNS Name Resolution to work, the Celery instance of Nautobot must be able to resolve the name of the device to IP address. If `Platform`, `Device Type` and/or `Role` are not provided, the plugin will try to identify this information automatically and, based on the settings, it can create them in Nautobot as needed. !!! 
note - If the Platform is provided, it must point to an existing Nautobot Platform. NAPALM driver of this platform will be used only if it is defined for the platform in Nautobot. - To use a preferred NAPALM driver, either define it in Nautobot per platform or in the plugins settings under `platform_map`. + The SSoT jobs use nornir-netmiko to run the show commands defined in the command mappers. #### SSH Autodetect -The `nautobot-device-onboarding` app recognizes platform types with a Netmiko SSH Autodetect mechanism. The user may need to specify additional information for platforms where Netmiko's `ssh_autodetect` feature does not work. +The `nautobot-device-onboarding` apps `Sync Devices` job recognizes platform types with a Netmiko SSH Autodetect mechanism. The user may need to specify additional information for platforms where Netmiko's `ssh_autodetect` feature does not work. [Here is the list](https://github.com/ktbyers/netmiko/blob/v3.4.0/netmiko/ssh_autodetect.py#L50) of platforms supported by `ssh_autodetect`. The `nautobot-device-onboarding` app can be used with any devices that are supported by NAPALM. Even custom NAPALM driver plugins can be used with a bit of effort. -Devices that are supported by NAPALM but are not running SSH or don't have support for `ssh_autodetect` will still work with this app, but will require some additional information in the onboarding job. - The table below shows which common platforms will be SSH auto-detected by default. |Platform |Platform Autodetect| @@ -44,8 +46,7 @@ Arista EOS | No| For the platforms where SSH auto-detection does not work, the user will need to: -1. Manually define a Platform in Nautobot (this will be a one-time task in order to support any number of devices using this Platform) -2. During onboarding, a Port and Platform must explicitly be specified (in addition to the IP and Location) +1. Select the platform in the job inputs form. 
### IOS and Junos Auto-Created Platforms @@ -57,78 +58,36 @@ The Onboarding App will automatically create Platforms for vendor operating syst ## Use-cases and common workflows -### Create a New Platform - -This section demonstrates how to create a new Platform in the Nautobot UI. Specifically, it offers examples for creating platforms for Cisco `nxapi` and Arista `eos` devices, but the concepts are applicable to any Platform that is manually created. - -- In the Nautobot dropdown menu, go to `Devices--> Platforms--> Add/+`. -- Define the attributes for the Platform on this screen and click on the 'Create' button. -- 'Manufacturer' and 'NAPALM arguments' are optional. - -!!! note - Slugs have been deprecated in Nautobot 2. The Platform `Network driver` will now be used to determine the driver to use. - -#### Cisco NXOS Platform - -A Platform that will work with NXOS devices running the `nxapi` feature must have specific values for these attributes: - -- `Network driver` **SHOULD** be `cisco_nxos`. -- `NAPALM driver` **MUST** be `nxos`. - -#### Arista EOS Platform - -A Platform that will work with Arista EOS devices must have specific values for these attributes: - -- `Network driver` **SHOULD** be `arista_eos`. -- `NAPALM driver` **MUST** be `eos`. - ### Onboard a New Device A new device can be onboarded via : -- A job execution. -- API, via a `POST` to `/api/extras/jobs/Perform%20Device%20Onboarding/run` or `/api/extras/jobs/{id}/run` +- A SSoT job execution. + - Via Jobs menu + - Via SSoT Dashboard +- API, via a `POST` to `/api/extras/jobs/TODO/run` or `/api/extras/jobs/{id}/run` !!! note - The Device Onboarding Job's ID (UUID) will be different per Nautobot instance. + The SSoT Job's ID (UUID) will be different per Nautobot instance. During a successful onboarding process, a new device will be created in Nautobot with its management interface and its primary IP assigned. The management interface will be discovered on the device based on the IP address provided. 
-!!! note - By default, the app is using the credentials defined in the main `nautobot_config.py` for Napalm (`NAPALM_USERNAME`/`NAPALM_PASSWORD`/`NAPALM_ARGS`). It's possible to define specific credentials for each onboarding job execution. - -### Onboard a Cisco NXOS Device Running the `nxapi` Feature - -When onboarding an NXOS device with the `nxapi` feature, there are a few requirements: - -- The `Port` must be the same value configured for `nxapi https port` on the Cisco Nexus device -- The `Platform` must be explicitly set to be one with the specific parameters in the [Cisco NXOS Platform](#cisco-nxos-platform) section - -### Onboarding an Arista EOS Device - -When onboarding an Arista EOS device, there are a few requirements: - -- The `Port` must be the same value configured for HTTPS on the Arista device -- The `Platform` must be explicitly set to be the one with the specific parameters in the [Arista EOS Platform](#arista-eos-platform) section +This SSoT job supports a bulk CSV execution option to speed up this process. - -### Consult the Status of Onboarding Tasks +### Consult the Status of the Sync Network Devices SSoT Job The status of onboarding jobs can be viewed via the UI (Jobs > Job Results) or retrieved via API (`/api/extras/job-results/`) with each process corresponding to an individual Job-Result object. # API -!!! note - In V3.0, with the move of the app to a job, the dedicated API views have been removed. This also removes API documentation from the built in Swagger API documentation. 
- -To run an onboarding task Job via the api: +To run the SSoT Sync Devices Job via the api: -Post to `/api/extras/jobs/Perform%20Device%20Onboarding/run/` with the relevent onboarding data: +Post to `/api/extras/jobs/TODO/run/` with the relevent onboarding data: ```bash -curl -X "POST" /api/extras/jobs/Perform%20Device%20Onboarding/run/ -H "Content-Type: application/json" -H "Authorization: Token $NAUTOBOT_TOKEN" -d '{"data": {"location": "", "ip_address": "", "port": 22, "timeout": 30}} +curl -X "POST" /api/extras/jobs/TODO/run/ -H "Content-Type: application/json" -H "Authorization: Token $NAUTOBOT_TOKEN" -d '{"data": {"location": "", "ip_address": "", "port": 22, "timeout": 30}} ``` Required Fields: @@ -143,3 +102,35 @@ Optional Fields: role: Role UUID device_type: Device Type UUID continue_on_failure: Boolean + +### Enhace Existing Device + +A existing devices data can be expanded to include additonal objects by: + +- A SSoT job execution. + - Via Jobs menu + - Via SSoT Dashboard +- API, via a `POST` to `/api/extras/jobs/TODO/run` or `/api/extras/jobs/{id}/run` + +!!! note + The SSoT Job's ID (UUID) will be different per Nautobot instance. + +During a successful network data sync process, a devices related objects will be created in Nautobot with all interfaces, their IP addresses, and optionally VLANs, and VRFs. + +### Consult the Status of the Sync Network Data SSoT Job + +The status of onboarding jobs can be viewed via the UI (Jobs > Job Results) or retrieved via API (`/api/extras/job-results/`) with each process corresponding to an individual Job-Result object. 
+ +# API + +To run the SSoT Sync Devices Job via the api: + + +Post to `/api/extras/jobs/TODO/run/` with the relevent onboarding data: + +```bash +curl -X "POST" /api/extras/jobs/TODO/run/ -H "Content-Type: application/json" -H "Authorization: Token $NAUTOBOT_TOKEN" -d '{"data": {"devices": ""} +``` + +Required Fields: + devices: Location UUID diff --git a/docs/user/app_use_cases_legacy.md b/docs/user/app_use_cases_legacy.md new file mode 100644 index 00000000..a918282d --- /dev/null +++ b/docs/user/app_use_cases_legacy.md @@ -0,0 +1,145 @@ +# Using the App + +This document describes common use-cases and scenarios for this App. + +## General Usage + +### Preparation + +To properly onboard a device, a user needs to provide, at a minimum: + +1. The Device's Location +2. The Device's primary IP address or DNS Name + +!!! note + For DNS Name Resolution to work, the instance of Nautobot must be able to resolve the name of the device to IP address. + +If other attributes (`Platform`, `Device Type`, `Role`) are provided in the onboarding job, the app will use provided value for the onboarded device. + +If `Platform`, `Device Type` and/or `Role` are not provided, the plugin will try to identify this information automatically and, based on the settings, it can create them in Nautobot as needed. + +!!! note + If the Platform is provided, it must point to an existing Nautobot Platform. NAPALM driver of this platform will be used only if it is defined for the platform in Nautobot. + To use a preferred NAPALM driver, either define it in Nautobot per platform or in the plugins settings under `platform_map`. + +#### SSH Autodetect + +The `nautobot-device-onboarding` app recognizes platform types with a Netmiko SSH Autodetect mechanism. The user may need to specify additional information for platforms where Netmiko's `ssh_autodetect` feature does not work. 
+ +[Here is the list](https://github.com/ktbyers/netmiko/blob/v3.4.0/netmiko/ssh_autodetect.py#L50) of platforms supported by `ssh_autodetect`. + +The `nautobot-device-onboarding` app can be used with any devices that are supported by NAPALM. Even custom NAPALM driver plugins can be used with a bit of effort. + +Devices that are supported by NAPALM but are not running SSH or don't have support for `ssh_autodetect` will still work with this app, but will require some additional information in the onboarding job. + +The table below shows which common platforms will be SSH auto-detected by default. + +|Platform |Platform Autodetect| +--------------|-------------------- +Juniper/Junos | Yes (when running Netconf over SSH)| +Cisco IOS-XE |Yes| +Cisco NXOS (ssh) | Yes| +Cisco NXOS (nxapi)| No| +Arista EOS | No| + +For the platforms where SSH auto-detection does not work, the user will need to: + +1. Manually define a Platform in Nautobot (this will be a one-time task in order to support any number of devices using this Platform) +2. During onboarding, a Port and Platform must explicitly be specified (in addition to the IP and Location) + +### IOS and Junos Auto-Created Platforms + +The Onboarding App will automatically create Platforms for vendor operating systems where platform auto-detection works. The picture below shows the details of auto-created Platforms for `cisco_ios` and `juniper_junos`. + +![cisco_ios_platform](../images/platform_cisco_ios.png) +![juniper_junos_platform](../images/platform_juniper_junos.png) + + +## Use-cases and common workflows + +### Create a New Platform + +This section demonstrates how to create a new Platform in the Nautobot UI. Specifically, it offers examples for creating platforms for Cisco `nxapi` and Arista `eos` devices, but the concepts are applicable to any Platform that is manually created. + +- In the Nautobot dropdown menu, go to `Devices--> Platforms--> Add/+`. 
+- Define the attributes for the Platform on this screen and click on the 'Create' button. +- 'Manufacturer' and 'NAPALM arguments' are optional. + +!!! note + Slugs have been deprecated in Nautobot 2. The Platform `Network driver` will now be used to determine the driver to use. + +#### Cisco NXOS Platform + +A Platform that will work with NXOS devices running the `nxapi` feature must have specific values for these attributes: + +- `Network driver` **SHOULD** be `cisco_nxos`. +- `NAPALM driver` **MUST** be `nxos`. + +#### Arista EOS Platform + +A Platform that will work with Arista EOS devices must have specific values for these attributes: + +- `Network driver` **SHOULD** be `arista_eos`. +- `NAPALM driver` **MUST** be `eos`. + + +### Onboard a New Device + +A new device can be onboarded via : + +- A job execution. +- API, via a `POST` to `/api/extras/jobs/Perform%20Device%20Onboarding/run` or `/api/extras/jobs/{id}/run` + +!!! note + The Device Onboarding Job's ID (UUID) will be different per Nautobot instance. + +During a successful onboarding process, a new device will be created in Nautobot with its management interface and its primary IP assigned. The management interface will be discovered on the device based on the IP address provided. + +!!! note + By default, the app is using the credentials defined in the main `nautobot_config.py` for Napalm (`NAPALM_USERNAME`/`NAPALM_PASSWORD`/`NAPALM_ARGS`). It's possible to define specific credentials for each onboarding job execution. 
+ +### Onboard a Cisco NXOS Device Running the `nxapi` Feature + +When onboarding an NXOS device with the `nxapi` feature, there are a few requirements: + +- The `Port` must be the same value configured for `nxapi https port` on the Cisco Nexus device +- The `Platform` must be explicitly set to be one with the specific parameters in the [Cisco NXOS Platform](#cisco-nxos-platform) section + +### Onboarding an Arista EOS Device + +When onboarding an Arista EOS device, there are a few requirements: + +- The `Port` must be the same value configured for HTTPS on the Arista device +- The `Platform` must be explicitly set to be the one with the specific parameters in the [Arista EOS Platform](#arista-eos-platform) section + + +### Consult the Status of Onboarding Tasks + +The status of onboarding jobs can be viewed via the UI (Jobs > Job Results) or retrieved via API (`/api/extras/job-results/`) with each process corresponding to an individual Job-Result object. + +# API + +!!! note + In V3.0, with the move of the app to a job, the dedicated API views have been removed. This also removes API documentation from the built in Swagger API documentation. 
+ +To run an onboarding task Job via the api: + + +Post to `/api/extras/jobs/Perform%20Device%20Onboarding/run/` with the relevent onboarding data: + +```bash +curl -X "POST" /api/extras/jobs/Perform%20Device%20Onboarding/run/ -H "Content-Type: application/json" -H "Authorization: Token $NAUTOBOT_TOKEN" -d '{"data": {"location": "", "ip_address": "", "port": 22, "timeout": 30}} +``` + +Required Fields: + location: Location UUID + ip_address: String of IP or CSV of IPs + port: Integer + timeout: Integer + +Optional Fields: + credentials: Secret Group UUID + platform: Platform UUID + role: Role UUID + device_type: Device Type UUID + continue_on_failure: Boolean diff --git a/docs/user/external_interactions.md b/docs/user/external_interactions.md index d167706b..891ddcd8 100644 --- a/docs/user/external_interactions.md +++ b/docs/user/external_interactions.md @@ -6,4 +6,5 @@ This document describes external dependencies and prerequisites for this App to ### From the App to Other Systems -The App uses [netmiko](https://github.com/ktbyers/netmiko) and [NAPALM](https://napalm.readthedocs.io/en/latest/) libraries to connect to network devices. +- The App uses [netmiko](https://github.com/ktbyers/netmiko) and [NAPALM](https://napalm.readthedocs.io/en/latest/) libraries to connect to network devices. +- Git integrations(optional) allow a user to override default Command Getter YAML files. diff --git a/docs/user/faq.md b/docs/user/faq.md index 2cf8fe66..4572a0c0 100644 --- a/docs/user/faq.md +++ b/docs/user/faq.md @@ -16,7 +16,7 @@ You need to disable automatic platform detection, specify the device platform ty ## Is it possible to disable the automatic creation of Device Type, Device Role or Platform? -**Yes**! Using the plugin settings, it's possible to control individually the creation of `device_role`, `device_type`, `manufacturer` & `platform`. +**Yes** (Legacy)! 
Using the plugin settings, it's possible to control individually the creation of `device_role`, `device_type`, `manufacturer` & `platform`. ```python # configuration.py @@ -32,12 +32,16 @@ PLUGINS_CONFIG = { } ``` +**Yes** (SSoT)! Using the job for input selections, it's possible to control individually the creation of `device_role`, `device_type`, `manufacturer` & `platform`. + ## How can I update the default credentials used to connect to a device? By default, the plugin uses the credentials defined in the main `nautobot_config.py` for NAPALM (`NAPALM_USERNAME`/`NAPALM_PASSWORD`/`DEVICE_ARGS`). You can update the default credentials in `nautobot_config.py ` or you can provide specific one for each onboarding job via a SecretsGroup. If using SecretsGroup the Access Type for the associated Secrets must be `Generic` and at minimum associated Secrets for `Username` & `Password` are required with `Secret` being optional. !!! warning - If an enable secret is required for the remote device it must be set using above patters. + If an enable secret is required for the remote device it must be set using above patterns. + +For the SSoT onboarding based jobs SecretGroups are required. ## How can I update the optional arguments for NAPALM? @@ -45,10 +49,7 @@ Optional arguments are often used to define a `secret` for Cisco devices and oth ## Does this app support the discovery and the creation of all interfaces and IP Addresses? -**No**. The plugin will only discover and create the management interface and the management IP address. Importing all interfaces and IP addresses is a much larger problem that requires more preparation. This is out of scope of this project. - -!!! tip - We recommend Network Importer tool from Network to Code for a post-onboarding network state synchronization. See [its GitHub repository](https://github.com/networktocode/network-importer) for more details. +**Yes**. 
The legacy Deivce Onboarding job/SSot Sync Devices will only discover and create the management interface and the management IP address. Importing all interfaces and IP addresses is available from the SSoT job (Sync Network Data). ## Does this app support the discovery of device based on fqdn? From 8ff33513a1c17080076924187ef7e3f517086082 Mon Sep 17 00:00:00 2001 From: Jeff Kala Date: Mon, 15 Apr 2024 09:10:05 -0500 Subject: [PATCH 212/225] more doc adds --- docs/user/app_detailed_design.md | 24 +++++++++++++++++++----- 1 file changed, 19 insertions(+), 5 deletions(-) diff --git a/docs/user/app_detailed_design.md b/docs/user/app_detailed_design.md index 3c143d7e..e5029675 100755 --- a/docs/user/app_detailed_design.md +++ b/docs/user/app_detailed_design.md @@ -18,16 +18,30 @@ This page will describe the newer SSoT jobs that this App exposes and how they w 2. The SSoT framework loads the Nautobot adapter information. 3. The SSoT frameworks network adapater `load()` method calls nornir functionality. - - The job inputs data from the job inputs from are passed to the InitNornir initializer, because we only have basic information a custom `EmptyInventory` Nornir inventory plugin is packaged with the App. This get initialized in the `INitNornir` function, but actually initialzes a true inventory that is empty. + - The job inputs data is passed to the InitNornir initializer, because we only have basic information a custom `EmptyInventory` Nornir inventory plugin is packaged with the App. This get initialized in the `InitNornir` function, but actually initialzes a true inventory that is empty. - Since `Platform` information may need to be auto-detected before adding a Nornir `Host` object to the inventory, a `create_inventory` function is executed that uses the SSH-Autodetect via Netmiko to try to determine the platform so it can be injected into the `Host` object. 
- - Finally, all the platform specific commands to run, along with all the jpath, post_processor information loaded from the platform specific YAML files must be injected into the Nornir data object to be accessible later in the extract, transform functions. -4. Within the context of a Nornir `with_processor` context manager call the netmiko_send command Nornir play. + - Finally, all the platform specific commands to run, along with all the jpath, `post_processor` information loaded from the platform specific YAML files must be injected into the Nornir data object to be accessible later in the extract, transform functions. +4. Within the context of a Nornir `with_processor` context manager call the `netmiko_send_commands` Nornir task. - Access the loaded platform specific YAML data and deduplicate commands to avoid running the same command multiple times. E.g. Multiple required data attributes come from the same show command. 5. Utilize native Nornir Processor to overload functionality on `subtask_instance_completed()` to run command outputs through extract and transformation functions. - This essentially is our "ET" portion of a "ETL" process. - Next, the JSON result from the show command after the parser executes (E.g. textfsm), gets run through the jdiff function `extract_data_from_json()` with the data and the `jpath` from the YAML file definition. - - Finally, an option `post_processor` jinja2 capable execution can further transform the data for that command before passing it to finish the SSoT syncronizaton. + - Finally, an optional `post_processor` jinja2 capable execution can further transform the data for that command before passing it to finish the SSoT syncronizaton. ## How the SSoT **Sync Network Data** Job Works - +1. The job is executed with inputs selected. + - One or multiple device selection. + - Other required fields are selected in the job inputs form. + - Toggle certain metadata booleans to True if you want more data synced. + +2. 
The SSoT framework loads the Nautobot adapter information.
+3. The SSoT framework's network adapter `load()` method calls Nornir functionality.
+    - The job inputs data is passed to the InitNornir initializer, because devices now exist in Nautobot we use the `NautobotORMInventory` Nornir inventory plugin that comes from `nautobot-plugin-nornir`.
+    - Finally, all the platform specific `commands` to run, along with all the `jpath`, `post_processor` information loaded from the platform specific YAML files must be injected into the Nornir data object to be accessible later in the extract, transform functions.
+4. Within the context of a Nornir `with_processor` context manager call the `netmiko_send_commands` Nornir task.
+    - Access the loaded platform specific YAML data and deduplicate commands to avoid running the same command multiple times. E.g. Multiple required data attributes come from the same show command.
+5. Utilize native Nornir Processor to overload functionality on `subtask_instance_completed()` to run command outputs through extract and transformation functions.
+    - This essentially is our "ET" portion of an "ETL" process.
+    - Next, the JSON result from the show command after the parser executes (E.g. textfsm), gets run through the jdiff function `extract_data_from_json()` with the data and the `jpath` from the YAML file definition.
+    - Finally, an optional `post_processor` jinja2 capable execution can further transform the data for that command before passing it to finish the SSoT synchronization.
From 762ca74584f9b398e4b66402dcad671b4bed0de8 Mon Sep 17 00:00:00 2001 From: Jeff Kala Date: Mon, 15 Apr 2024 13:50:19 -0500 Subject: [PATCH 213/225] more doc adds --- docs/dev/extending.md | 2 +- docs/user/app_detailed_design.md | 7 +++++ docs/user/app_overview.md | 2 +- docs/user/app_use_cases.md | 18 ++++++++++--- docs/user/app_yaml_overrides.md | 44 ++++++++++++++++++++++++++++++++ mkdocs.yml | 3 +++ 6 files changed, 70 insertions(+), 6 deletions(-) create mode 100755 docs/user/app_yaml_overrides.md diff --git a/docs/dev/extending.md b/docs/dev/extending.md index 06b8e7c6..452ffac1 100644 --- a/docs/dev/extending.md +++ b/docs/dev/extending.md @@ -19,4 +19,4 @@ Extending the platform support for the SSoT specific jobs should be accomplished New platform support should be simplified in this framework, by providing a YAML file. -TODO: +The format of these YAML files are and how to extend this application is covered in [App YAML Overrides](./app_yaml_overrides.md). diff --git a/docs/user/app_detailed_design.md b/docs/user/app_detailed_design.md index e5029675..c7bb6fb3 100755 --- a/docs/user/app_detailed_design.md +++ b/docs/user/app_detailed_design.md @@ -45,3 +45,10 @@ This page will describe the newer SSoT jobs that this App exposes and how they w - This essentially is our "ET" portion of a "ETL" process. - Next, the JSON result from the show command after the parser executes (E.g. textfsm), gets run through the jdiff function `extract_data_from_json()` with the data and the `jpath` from the YAML file definition. - Finally, an optional `post_processor` jinja2 capable execution can further transform the data for that command before passing it to finish the SSoT syncronizaton. + +## Detailed Design Diagram + +Here are two diagrams detailing the SSoT based jobs in deeper detail. + +![Sync Devices](). +![Sync Network Data](). 
diff --git a/docs/user/app_overview.md b/docs/user/app_overview.md index 9d3cde24..44835cbe 100644 --- a/docs/user/app_overview.md +++ b/docs/user/app_overview.md @@ -48,7 +48,7 @@ One example of a solution that can retrieve that additional device data and impo The new implementation of device onboarding in this app is utilizing the SSoT framework; the main reasons for providing the new mechanisms were to solve the following challenges: - Make it easier to extending and add additonal vendor/OS support. -- Collapse this app and the external [network-importer]() into the same Nautobot app for simplified device onboarding with more object support.' +- Collapse this app and the external [Network Importer](https://github.com/networktocode/network-importer) into the same Nautobot app for simplified device onboarding with more object support.' - Remove the Batfish dependency. - Re-use backend plugins and libraries such as `nautobot-app-nornir` to provide the a similar feeling to other plugins like `nautobot-app-golden-config`. - Utilize SSoT framework and the new `contrib` functionality to speed up development of new features. diff --git a/docs/user/app_use_cases.md b/docs/user/app_use_cases.md index e7c1cc05..1c7d30d4 100755 --- a/docs/user/app_use_cases.md +++ b/docs/user/app_use_cases.md @@ -56,8 +56,9 @@ The Onboarding App will automatically create Platforms for vendor operating syst ![juniper_junos_platform](../images/platform_juniper_junos.png) -## Use-cases and common workflows +# Use-cases and common workflows +## Onboarding a Device ### Onboard a New Device @@ -79,7 +80,7 @@ This SSoT job supports a bulk CSV execution option to speed up this process. The status of onboarding jobs can be viewed via the UI (Jobs > Job Results) or retrieved via API (`/api/extras/job-results/`) with each process corresponding to an individual Job-Result object. 
-# API +### API To run the SSoT Sync Devices Job via the api: @@ -103,7 +104,9 @@ Optional Fields: device_type: Device Type UUID continue_on_failure: Boolean -### Enhace Existing Device +## Onboarding Interface, Vlans, IPs Etc. + +### Enhance Existing Device A existing devices data can be expanded to include additonal objects by: @@ -121,7 +124,7 @@ During a successful network data sync process, a devices related objects will be The status of onboarding jobs can be viewed via the UI (Jobs > Job Results) or retrieved via API (`/api/extras/job-results/`) with each process corresponding to an individual Job-Result object. -# API +### API To run the SSoT Sync Devices Job via the api: @@ -134,3 +137,10 @@ curl -X "POST" /api/extras/jobs/TODO/run/ -H "Content-Type: applic Required Fields: devices: Location UUID + + +## Using Git(Datasources) to Override the Apps Defaults + +By utilizing the Nautobot core feature `Datasource` the command mappers, jpaths, post_processors for each platform can be overriden. This also gives an easy way for a user to add platform support without having to get those fixes directly upstreamed into this application. + +The format of these YAML files are and how to extend this application is covered in [App YAML Overrides](./app_yaml_overrides.md). diff --git a/docs/user/app_yaml_overrides.md b/docs/user/app_yaml_overrides.md new file mode 100755 index 00000000..0e111339 --- /dev/null +++ b/docs/user/app_yaml_overrides.md @@ -0,0 +1,44 @@ +# Extending and Overriding Platform YAML Files + +One element of the new SSoT based jobs this app exposes; is the attempt to create a framework that allows the definition of each platforms dependencies in a YAML format. + +## File Format +There are only a few components to the file and they're described below: + +- `ssot job name` - Name of the job to define the commands and metadata needed for that job. +- `root key data name` - Is fully defined in the schema definition. 
+- `commands` - List of commands to execute in order to get the required data. +- `command` - Actual `show` command to execute. +- `parser` - Whether to use a parser (textfsm, pyats, ttp, etc) alternatively `no` can be used if the platform supports some other method to return structured data. E.g. (`| display json`) or an equivalent. +- `jpath` - The jmespath (specifically jdiffs implementation) to extract the data from the parsed json returned from parser. +- `post_processor` - Jinja2 capable code to further transform the returned data post jpath extraction. + +As an example: + +```yaml +--- +device_onboarding: + hostname: + commands: + - command: "show version" + parser: "textfsm" + jpath: "[*].hostname" + post_processor: "{{ obj[0] | upper }}" +..omitted.. +``` + +## Using Datasource to Override + +This App provides sane defaults that have been tested, the files are located in the source code under `command_mappers`. There is potential for these sane defaults to not work in a given environment; alternatively you may want to add additional platform support in your deployment. These are the two main use cases to utilize the datasource feature this app exposes. + +!!! info + To avoid overly complicating the merge logic, the App will always prefer the platform specific YAML file loaded in from the git repository. + +### Properly Formatting Git Repository + +When loading from a Git Repository this App is expecting a root directory called `onboarding_command_mappers`. Each of the platform YAML files are then located in this directory. The YAML file names must be named `.yml`. + +### Setting up the Git Repository + +1. `Extensibility -> Git Repositories` +2. 
Create a new repository, most importantly selecting the `Provides` of `Onboarding Command Mappers` diff --git a/mkdocs.yml b/mkdocs.yml index d37c79b8..ed8a849f 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -102,8 +102,10 @@ nav: - App Overview: "user/app_overview.md" - Getting Started: "user/app_getting_started.md" - Using the App: "user/app_use_cases.md" + - Using the App (Legacy): "user/app_use_cases_legacy.md" - Frequently Asked Questions: "user/faq.md" - External Interactions: "user/external_interactions.md" + - Detailed Design: "user/app_detailed_design.md" - Administrator Guide: - Install and Configure: "admin/install.md" - Upgrade: "admin/upgrade.md" @@ -118,6 +120,7 @@ nav: - v1.0: "admin/release_notes/version_1.0.md" - Developer Guide: - Extending the App: "dev/extending.md" + - YAML Overrides: "dev/app_yaml_overrides.md" - Onboarding Extensions: "dev/onboarding_extensions.md" - Contributing to the App: "dev/contributing.md" - Development Environment: "dev/dev_environment.md" From 244ca9becd87a88ccece5947f2595753914241d5 Mon Sep 17 00:00:00 2001 From: Susan Hooks Date: Wed, 17 Apr 2024 21:27:46 +0000 Subject: [PATCH 214/225] vlans for nxos working --- .../command_mappers/cisco_ios.yml | 11 -- .../command_mappers/cisco_nxos.yml | 19 +- .../nornir_plays/formatter.py | 186 ++++++++++-------- 3 files changed, 120 insertions(+), 96 deletions(-) diff --git a/nautobot_device_onboarding/command_mappers/cisco_ios.yml b/nautobot_device_onboarding/command_mappers/cisco_ios.yml index 5086c7d2..1c810d03 100755 --- a/nautobot_device_onboarding/command_mappers/cisco_ios.yml +++ b/nautobot_device_onboarding/command_mappers/cisco_ios.yml @@ -34,12 +34,6 @@ network_importer: - command: "show version" use_textfsm: true jpath: "[*].serial[0]" - # interfaces: - # interfaces: - # commands: - # - command: "show interfaces" - # use_textfsm: true - # jpath: "[*].{interface: interface}" type: commands: - command: "show interfaces" @@ -80,11 +74,6 @@ network_importer: - command: 
"show vrf" use_textfsm: true jpath: "[*].{name: name, default_rd: default_rd, interfaces: interfaces}" - # mode: - # commands: - # - command: "show interfaces switchport" - # use_textfsm: true - # jpath: "[*].{interface: interface, mode: mode, admin_mode: admin_mode}" vlans: commands: - command: "show vlan" diff --git a/nautobot_device_onboarding/command_mappers/cisco_nxos.yml b/nautobot_device_onboarding/command_mappers/cisco_nxos.yml index 5cebf7d1..7cf0d47e 100755 --- a/nautobot_device_onboarding/command_mappers/cisco_nxos.yml +++ b/nautobot_device_onboarding/command_mappers/cisco_nxos.yml @@ -17,15 +17,21 @@ device_onboarding: jpath: "[*].platform" mgmt_interface: commands: - - command: "show interface" + - command: "show ip interface vrf all" use_textfsm: true jpath: "[?ip_address=='{{ obj }}'].interface || [`mgmt0`]" mask_length: commands: - - command: "show interface" + - command: "show ip interface vrf all" use_textfsm: true - jpath: "[?ip_address=='{{ obj }}'].prefix_length || [`31`]" + jpath: "[?ip_address=='{{ obj }}'].subnet" + post_processor: "{% if '/' in obj[0] %}{{ obj[0].split('/')[1] }}{% else %}31{% endif %}" network_importer: + interface: + commands: + - command: "show interface" + use_textfsm: true + jpath: "[*].{interface: interface, interface: interface}" serial: commands: - command: "show inventory" @@ -38,14 +44,15 @@ network_importer: jpath: "[*].{interface: interface, type: hardware_type}" ip_addresses: commands: - - command: "show interface" + - command: "show ip interface vrf all" use_textfsm: true jpath: "[*].{interface: interface, ip_address: ip_address}" prefix_length: commands: - - command: "show interface" + - command: "show ip interface vrf all" use_textfsm: true - jpath: "[*].{interface: interface, prefix_length: prefix_length}" + jpath: "[*].{interface: interface, prefix_length: subnet}" + #post_processor: "[*].{interface: interface, prefix_length: subnet && map(&subnet, [?ip_address=='{{ obj }}'])}" mtu: commands: - command: 
"show interface" diff --git a/nautobot_device_onboarding/nornir_plays/formatter.py b/nautobot_device_onboarding/nornir_plays/formatter.py index 604d6251..c8d0bc80 100755 --- a/nautobot_device_onboarding/nornir_plays/formatter.py +++ b/nautobot_device_onboarding/nornir_plays/formatter.py @@ -6,6 +6,7 @@ from jdiff import extract_data_from_json from jinja2.sandbox import SandboxedEnvironment from netutils.interface import canonical_interface_name +from netutils.vlan import vlanconfig_to_list from nautobot.dcim.models import Device from nautobot_device_onboarding.constants import INTERFACE_TYPE_MAP_STATIC @@ -37,28 +38,28 @@ def perform_data_extraction(host, dict_field, command_info_dict, j2_env, task_re if show_command["command"] == task_result.name: jpath_template = j2_env.from_string(show_command["jpath"]) j2_rendered_jpath = jpath_template.render({"obj": host.name, "original_host": host.name}) - print(j2_rendered_jpath) + if not task_result.failed: if isinstance(task_result.result, str): try: result_to_json = json.loads(task_result.result) extracted_value = extract_data_from_json(result_to_json, j2_rendered_jpath) - print(f"extraced value: {extracted_value}") + except json.decoder.JSONDecodeError: extracted_value = None else: extracted_value = extract_data_from_json(task_result.result, j2_rendered_jpath) - print(f"extracted value 2: {extracted_value}") + if show_command.get("post_processor"): template = j2_env.from_string(show_command["post_processor"]) extracted_processed = template.render({"obj": extracted_value, "original_host": host.name}) - print(f"extracted 1: {extracted_processed}") + else: extracted_processed = extracted_value - print(f"extracted 2: {extracted_processed}") + if isinstance(extracted_value, list) and len(extracted_value) == 1: extracted_processed = extracted_value[0] - print(f"extracted 3: {extracted_processed}") + if command_info_dict.get("validator_pattern"): # temp validator if command_info_dict["validator_pattern"] == "not None": @@ 
-124,6 +125,15 @@ def ensure_list(data): return data +def extract_prefix_from_subnet(prefix_list): + for item in prefix_list: + if "prefix_length" in item and item["prefix_length"]: + item["prefix_length"] = item["prefix_length"].split("/")[-1] + else: + item["prefix_length"] = None + return prefix_list + + def format_ios_results(device): """Format the results of the show commands for IOS devices.""" serial = device.get("serial") @@ -213,7 +223,7 @@ def format_ios_results(device): interface_dict[canonical_name]["802.1Q_mode"] = "tagged" tagged_vlans = [] for vlan_id in trunking_vlans[0].split(","): - print(f"vlan_id: {vlan_id}") + if "-" in vlan_id: start, end = map(int, vlan_id.split("-")) for id in range(start, end + 1): @@ -293,7 +303,7 @@ def format_ios_results(device): def format_nxos_results(device): """Format the results of the show commands for NX-OS devices.""" - + interfaces = device.get("interface") serial = device.get("serial") mtus = device.get("mtu", []) types = device.get("type", []) @@ -302,20 +312,20 @@ def format_nxos_results(device): macs = device.get("mac_address", []) descriptions = device.get("description", []) link_statuses = device.get("link_status", []) - # modes = device.get("mode", []) vrfs_rd = device.get("vrf_rds", []) vrfs_interfaces = device.get("vrf_interfaces", []) vlans = device.get("vlans", []) interface_vlans = device.get("interface_vlans", []) + interface_list = ensure_list(interfaces) mtu_list = ensure_list(mtus) type_list = ensure_list(types) ip_list = ensure_list(ips) prefix_list = ensure_list(prefixes) + prefix_list = extract_prefix_from_subnet(prefix_list) mac_list = ensure_list(macs) description_list = ensure_list(descriptions) link_status_list = ensure_list(link_statuses) - # mode_list = ensure_list(modes) vlan_list = ensure_list(vlans) interface_vlan_list = ensure_list(interface_vlans) @@ -329,92 +339,109 @@ def format_nxos_results(device): vrfs_interfaces = ensure_list(vrfs_interfaces) interface_dict = {} + 
default_values = { + "mtu": "", + "type": "", + "ip_addresses": [], + "mac_address": "", + "description": "", + "link_status": False, + "lag": "", + "vrf": {}, + "802.1Q_mode": "", + "tagged_vlans": [], + "untagged_vlan": "", + } + for item in interface_list: + canonical_name = canonical_interface_name(item["interface"]) + interface_dict[canonical_name] = {**default_values} + for item in mtu_list: - interface_dict.setdefault(item["interface"], {})["mtu"] = item["mtu"] + canonical_name = canonical_interface_name(item["interface"]) + interface_dict.setdefault(canonical_name, {})["mtu"] = item["mtu"] for item in type_list: + canonical_name = canonical_interface_name(item["interface"]) interface_type = map_interface_type(item["type"]) - interface_dict.setdefault(item["interface"], {})["type"] = interface_type + interface_dict.setdefault(canonical_name, {})["type"] = interface_type for item in ip_list: - interface_dict.setdefault(item["interface"], {})["ip_addresses"] = {"ip_address": item["ip_address"]} + canonical_name = canonical_interface_name(item["interface"]) + interface_dict.setdefault(canonical_name, {})["ip_addresses"] = {"ip_address": item["ip_address"]} for item in prefix_list: - interface_dict.setdefault(item["interface"], {}).setdefault("ip_addresses", {})["prefix_length"] = item[ + canonical_name = canonical_interface_name(item["interface"]) + interface_dict.setdefault(canonical_name, {}).setdefault("ip_addresses", {})["prefix_length"] = item[ "prefix_length" ] for item in mac_list: - interface_dict.setdefault(item["interface"], {})["mac_address"] = item["mac_address"] + canonical_name = canonical_interface_name(item["interface"]) + interface_dict.setdefault(canonical_name, {})["mac_address"] = item["mac_address"] for item in description_list: - interface_dict.setdefault(item["interface"], {})["description"] = item["description"] + canonical_name = canonical_interface_name(item["interface"]) + interface_dict.setdefault(canonical_name, {})["description"] = 
item["description"] for item in link_status_list: - interface_dict.setdefault(item["interface"], {})["link_status"] = True if item["link_status"] == "up" else False - # for item in mode_list: - # interface_dict.setdefault(item["interface"], {})["802.1Q_mode"] = ( - # "access" if item["mode"] == "access" else "tagged" if item["mode"] == "trunk" else "" - # ) - - default_values = {"lag": "", "untagged_vlan": {}, "tagged_vlans": [], "vrf": {}, "802.1Q_mode": ""} + canonical_name = canonical_interface_name(item["interface"]) + interface_dict.setdefault(canonical_name, {})["link_status"] = True if item["link_status"] == "up" else False vlan_map = {vlan["vlan_id"]: vlan["vlan_name"] for vlan in vlan_list} - # print(f"vlan_map: {vlan_map}, interface_vlan_list: {interface_vlan_list}") - # for item in interface_vlan_list: - # try: - # if not item["interface"]: - # continue - # canonical_name = canonical_interface_name(item["interface"]) - # interface_dict.setdefault(canonical_name, {}) - # mode = item["mode"] - # trunking_vlans = item["trunking_vlans"] - # if mode == "trunk" and trunking_vlans == "1-4094": - # interface_dict[canonical_name]["802.1Q_mode"] = "tagged-all" - # interface_dict[canonical_name]["untagged_vlan"] = { - # "name": vlan_map[item["native_vlan"]], - # "id": item["native_vlan"], - # } - # elif mode == "access": - # interface_dict[canonical_name]["802.1Q_mode"] = "access" - # interface_dict[canonical_name]["untagged_vlan"] = { - # "name": item["access_vlan_name"], - # "id": item["access_vlan"], - # } - # elif mode == "trunk" and trunking_vlans != "1-4094": - # print(f"trunking_vlans: {trunking_vlans}") - # pass - # interface_dict[canonical_name]["802.1Q_mode"] = "tagged" - # tagged_vlans = [] - # trunking_vlans = trunking_vlans.split(",") - # for vlan_id in trunking_vlans: - # print(f"vlan_id: {vlan_id}") - # if "-" in vlan_id: - # start, end = map(int, vlan_id.split("-")) - # for id in range(start, end + 1): - # if str(id) not in vlan_map: - # 
print(f"Error: VLAN {id} found on interface, but not found in vlan db.") - # else: - # tagged_vlans.append({"name": vlan_map[str(id)], "id": str(id)}) - # else: - # if vlan_id not in vlan_map: - # print(f"Error: VLAN {vlan_id} found on interface, but not found in vlan db.") - # else: - # tagged_vlans.append({"name": vlan_map[vlan_id], "id": vlan_id}) - - # interface_dict[canonical_name]["tagged_vlans"] = tagged_vlans - # interface_dict[canonical_name]["untagged_vlan"] = { - # "name": vlan_map[item["native_vlan"]], - # "id": item["native_vlan"], - # } - # else: - # interface_dict[canonical_name]["802.1Q_mode"] = "" - # except KeyError as e: - # print(f"Error: VLAN not found on interface for interface {canonical_name} {e}") - # continue - for interface in interface_dict.values(): - for key, default in default_values.items(): - interface.setdefault(key, default) + for item in interface_vlan_list: + try: + if not item["interface"]: + continue + canonical_name = canonical_interface_name(item["interface"]) + + interface_dict.setdefault(canonical_name, {}) + mode = item["mode"] + trunking_vlans = item["trunking_vlans"] + if mode == "trunk" and trunking_vlans == "1-4094": + interface_dict[canonical_name]["802.1Q_mode"] = "tagged-all" + interface_dict[canonical_name]["untagged_vlan"] = { + "name": vlan_map[item["native_vlan"]], + "id": item["native_vlan"], + } + interface_dict[canonical_name]["tagged_vlans"] = [] + + elif mode == "access": + interface_dict[canonical_name]["802.1Q_mode"] = "access" + interface_dict[canonical_name]["untagged_vlan"] = { + "name": item["access_vlan_name"], + "id": item["access_vlan"], + } + interface_dict[canonical_name]["tagged_vlans"] = [] + + elif mode == "trunk" and trunking_vlans != "1-4094": + + tagged_vlans = [] + trunking_vlans = vlanconfig_to_list(trunking_vlans) + + for vlan_id in trunking_vlans: + + if vlan_id not in vlan_map: + continue + + else: + tagged_vlans.append({"name": vlan_map[vlan_id], "id": vlan_id}) + 
interface_dict[canonical_name]["802.1Q_mode"] = "tagged" + interface_dict[canonical_name]["tagged_vlans"] = tagged_vlans + interface_dict[canonical_name]["untagged_vlan"] = { + "name": vlan_map[item["native_vlan"]], + "id": item["native_vlan"], + } + + else: + + interface_dict[canonical_name]["802.1Q_mode"] = "" + interface_dict[canonical_name]["untagged_vlan"] = {} + interface_dict[canonical_name]["tagged_vlans"] = [] + except KeyError as e: + print(f"Error: VLAN not found on interface for interface {canonical_name} {e}") + continue for interface, data in interface_dict.items(): ip_addresses = data.get("ip_addresses", {}) if ip_addresses: data["ip_addresses"] = [ip_addresses] + else: + data["ip_addresses"] = [] # Convert interface names to canonical form interface_list = [] @@ -460,11 +487,12 @@ def format_nxos_results(device): del device["mac_address"] del device["description"] del device["link_status"] - # del device["mode"] + del device["mode"] del device["vrf_rds"] del device["vrf_interfaces"] del device["vlans"] del device["interface_vlans"] + del device["interface"] return device From eeafb174c1c670dd4ccb82ce22c0ad14ac826e47 Mon Sep 17 00:00:00 2001 From: Susan Hooks Date: Wed, 17 Apr 2024 22:18:17 +0000 Subject: [PATCH 215/225] linting --- nautobot_device_onboarding/command_mappers/cisco_ios.yml | 2 +- nautobot_device_onboarding/command_mappers/cisco_nxos.yml | 3 +-- nautobot_device_onboarding/jinja_filters.py | 2 +- nautobot_device_onboarding/nornir_plays/formatter.py | 8 ++------ 4 files changed, 5 insertions(+), 10 deletions(-) diff --git a/nautobot_device_onboarding/command_mappers/cisco_ios.yml b/nautobot_device_onboarding/command_mappers/cisco_ios.yml index 1c810d03..69ead20c 100755 --- a/nautobot_device_onboarding/command_mappers/cisco_ios.yml +++ b/nautobot_device_onboarding/command_mappers/cisco_ios.yml @@ -83,4 +83,4 @@ network_importer: commands: - command: "show interfaces switchport" use_textfsm: true - jpath: "[*].{interface: interface, 
admin_mode: admin_mode, access_vlan: access_vlan, native_vlan: native_vlan, trunking_vlans: trunking_vlans, voice_vlan: voice_vlan}" \ No newline at end of file + jpath: "[*].{interface: interface, admin_mode: admin_mode, access_vlan: access_vlan, native_vlan: native_vlan, trunking_vlans: trunking_vlans, voice_vlan: voice_vlan}" diff --git a/nautobot_device_onboarding/command_mappers/cisco_nxos.yml b/nautobot_device_onboarding/command_mappers/cisco_nxos.yml index 7cf0d47e..78ac56f6 100755 --- a/nautobot_device_onboarding/command_mappers/cisco_nxos.yml +++ b/nautobot_device_onboarding/command_mappers/cisco_nxos.yml @@ -44,7 +44,7 @@ network_importer: jpath: "[*].{interface: interface, type: hardware_type}" ip_addresses: commands: - - command: "show ip interface vrf all" + - command: "show ip interface vrf all" use_textfsm: true jpath: "[*].{interface: interface, ip_address: ip_address}" prefix_length: @@ -52,7 +52,6 @@ network_importer: - command: "show ip interface vrf all" use_textfsm: true jpath: "[*].{interface: interface, prefix_length: subnet}" - #post_processor: "[*].{interface: interface, prefix_length: subnet && map(&subnet, [?ip_address=='{{ obj }}'])}" mtu: commands: - command: "show interface" diff --git a/nautobot_device_onboarding/jinja_filters.py b/nautobot_device_onboarding/jinja_filters.py index 59ebf9a6..2745fb9b 100755 --- a/nautobot_device_onboarding/jinja_filters.py +++ b/nautobot_device_onboarding/jinja_filters.py @@ -38,7 +38,7 @@ def fix_interfaces(interfaces): @library.filter def collapse_list_to_dict(original_data): - """Takes a list of dictionaries and creates a dictionary based on outtermost key + """Takes a list of dictionaries and creates a dictionary based on outtermost key. 
Args: original_data (list): list of dictionaries diff --git a/nautobot_device_onboarding/nornir_plays/formatter.py b/nautobot_device_onboarding/nornir_plays/formatter.py index c8d0bc80..9ad0b1f2 100755 --- a/nautobot_device_onboarding/nornir_plays/formatter.py +++ b/nautobot_device_onboarding/nornir_plays/formatter.py @@ -126,6 +126,7 @@ def ensure_list(data): def extract_prefix_from_subnet(prefix_list): + """Extract the prefix length from the IP/Prefix.""" for item in prefix_list: if "prefix_length" in item and item["prefix_length"]: item["prefix_length"] = item["prefix_length"].split("/")[-1] @@ -170,10 +171,8 @@ def format_ios_results(device): for item in type_list: interface_type = map_interface_type(item["type"]) interface_dict.setdefault(item["interface"], {})["type"] = interface_type - for item in ip_list: interface_dict.setdefault(item["interface"], {})["ip_addresses"] = {"ip_address": item["ip_address"]} - for item in prefix_list: interface_dict.setdefault(item["interface"], {}).setdefault("ip_addresses", {})["prefix_length"] = item[ "prefix_length" @@ -355,7 +354,6 @@ def format_nxos_results(device): for item in interface_list: canonical_name = canonical_interface_name(item["interface"]) interface_dict[canonical_name] = {**default_values} - for item in mtu_list: canonical_name = canonical_interface_name(item["interface"]) interface_dict.setdefault(canonical_name, {})["mtu"] = item["mtu"] @@ -382,16 +380,15 @@ def format_nxos_results(device): interface_dict.setdefault(canonical_name, {})["link_status"] = True if item["link_status"] == "up" else False vlan_map = {vlan["vlan_id"]: vlan["vlan_name"] for vlan in vlan_list} - for item in interface_vlan_list: try: if not item["interface"]: continue canonical_name = canonical_interface_name(item["interface"]) - interface_dict.setdefault(canonical_name, {}) mode = item["mode"] trunking_vlans = item["trunking_vlans"] + if mode == "trunk" and trunking_vlans == "1-4094": interface_dict[canonical_name]["802.1Q_mode"] 
= "tagged-all" interface_dict[canonical_name]["untagged_vlan"] = { @@ -518,7 +515,6 @@ def format_results(compiled_results): if platform not in ["cisco_ios", "cisco_xe", "cisco_nxos"]: data.update({"failed": True, "failed_reason": f"Unsupported platform {platform}"}) if "type" in data: - serial = Device.objects.get(name=device).serial if serial == "": data.update({"failed": True, "failed_reason": "Serial not found for device in Nautobot."}) From 3edbc42a5e58a9751a1a7c482838a5e12d54a92e Mon Sep 17 00:00:00 2001 From: Jeff Kala Date: Wed, 17 Apr 2024 20:54:11 -0500 Subject: [PATCH 216/225] fix example dict keys --- nautobot_device_onboarding/jinja_filters.py | 34 ++++++++++----------- 1 file changed, 17 insertions(+), 17 deletions(-) diff --git a/nautobot_device_onboarding/jinja_filters.py b/nautobot_device_onboarding/jinja_filters.py index 59ebf9a6..414f95f1 100755 --- a/nautobot_device_onboarding/jinja_filters.py +++ b/nautobot_device_onboarding/jinja_filters.py @@ -28,10 +28,10 @@ def fix_interfaces(interfaces): int_values["ip_addresses"].append( {"ip_address": int_values.get("ip_address", ""), "prefix_length": int_values.get("prefix_length", "")} ) - if "up" in int_values["jeff_status"]: - int_values["jeff_status"] = True + if "up" in int_values["link_status"]: + int_values["link_status"] = True else: - int_values["jeff_status"] = False + int_values["link_status"] = False return interfaces @@ -46,22 +46,22 @@ def collapse_list_to_dict(original_data): Example: >>> example_data = [ - {'GigabitEthernet1': {'jeff_status': 'up'}}, - {'GigabitEthernet2': {'jeff_status': 'administratively down'}}, - {'GigabitEthernet3': {'jeff_status': 'administratively down'}}, - {'GigabitEthernet4': {'jeff_status': 'administratively down'}}, - {'Loopback0': {'jeff_status': 'administratively down'}}, - {'Loopback2': {'jeff_status': 'administratively down'}}, - {'Port-channel1': {'jeff_status': 'down'}} + {'GigabitEthernet1': {'link_status': 'up'}}, + {'GigabitEthernet2': 
{'link_status': 'administratively down'}}, + {'GigabitEthernet3': {'link_status': 'administratively down'}}, + {'GigabitEthernet4': {'link_status': 'administratively down'}}, + {'Loopback0': {'link_status': 'administratively down'}}, + {'Loopback2': {'link_status': 'administratively down'}}, + {'Port-channel1': {'link_status': 'down'}} ] >>> collapse_list_to_dict(example_data) - {'GigabitEthernet1': {'jeff_status': 'up'}, - 'GigabitEthernet2': {'jeff_status': 'administratively down'}, - 'GigabitEthernet3': {'jeff_status': 'administratively down'}, - 'GigabitEthernet4': {'jeff_status': 'administratively down'}, - 'Loopback0': {'jeff_status': 'administratively down'}, - 'Loopback2': {'jeff_status': 'administratively down'}, - 'Port-channel1': {'jeff_status': 'down'}} + {'GigabitEthernet1': {'link_status': 'up'}, + 'GigabitEthernet2': {'link_status': 'administratively down'}, + 'GigabitEthernet3': {'link_status': 'administratively down'}, + 'GigabitEthernet4': {'link_status': 'administratively down'}, + 'Loopback0': {'link_status': 'administratively down'}, + 'Loopback2': {'link_status': 'administratively down'}, + 'Port-channel1': {'link_status': 'down'}} """ return {root_key: data for data in original_data for root_key, data in data.items()} From 6e2d1b9cff1aa0352212513126c8640168006a54 Mon Sep 17 00:00:00 2001 From: David Cates Date: Thu, 18 Apr 2024 08:30:40 -0700 Subject: [PATCH 217/225] remove RD from vrf sync --- .../adapters/network_importer_adapters.py | 3 --- .../models/network_importer_models.py | 22 ++++++++++++------- 2 files changed, 14 insertions(+), 11 deletions(-) diff --git a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py index 1523ab46..8673ff69 100644 --- a/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py +++ b/nautobot_device_onboarding/diffsync/adapters/network_importer_adapters.py @@ -213,7 +213,6 @@ def 
load_vrfs(self): network_vrf = self.vrf( diffsync=self, name=vrf.name, - rd=vrf.rd if vrf.rd else None, namespace__name=vrf.namespace.name, ) try: @@ -234,7 +233,6 @@ def load_vrf_to_interface(self): vrf = {} if interface.vrf: vrf["name"] = interface.vrf.name - vrf["rd"] = str(interface.vrf.rd) network_vrf_to_interface = self.vrf_to_interface( diffsync=self, @@ -529,7 +527,6 @@ def load_vrfs(self): network_vrf = self.vrf( diffsync=self, name=interface_data["vrf"]["name"], - rd=interface_data["vrf"]["rd"] if interface_data["vrf"]["rd"] else None, namespace__name=self.job.namespace.name, ) try: diff --git a/nautobot_device_onboarding/diffsync/models/network_importer_models.py b/nautobot_device_onboarding/diffsync/models/network_importer_models.py index 1f7372be..93a3532c 100644 --- a/nautobot_device_onboarding/diffsync/models/network_importer_models.py +++ b/nautobot_device_onboarding/diffsync/models/network_importer_models.py @@ -1,6 +1,6 @@ """Diffsync models.""" -from typing import List, Optional, Union +from typing import List, Optional from diffsync import DiffSync, DiffSyncModel from diffsync import exceptions as diffsync_exceptions @@ -460,15 +460,13 @@ class NetworkImporterVRF(FilteredNautobotModel): _modelname = "vrf" _model = VRF _identifiers = ("name", "namespace__name") - _attributes = ("rd",) - rd: Union[str, None] name: str namespace__name: str class NetworkImporterVrfToInterface(DiffSyncModel): - """Shared data model representing a UnTaggedVlanToInterface.""" + """Shared data model representing a VrfToInterface.""" _modelname = "vrf_to_interface" _identifiers = ("device__name", "name") @@ -485,16 +483,24 @@ def _get_and_assign_vrf(cls, diffsync, attrs, interface): try: vrf = VRF.objects.get( name=attrs["vrf"]["name"], - rd=attrs["vrf"]["rd"] if attrs["vrf"]["rd"] else None, + rd=None, namespace=diffsync.job.namespace, ) except ObjectDoesNotExist: diffsync.job.logger.error( f"Failed to assign vrf to {interface.device}:{interface}, unable to locate a vrf 
with attributes " - f"[name: {attrs['vrf']['name']}, rd: {attrs['vrf']['rd']} " + f"[name: {attrs['vrf']['name']}" f"namespace: {diffsync.job.namespace}]" ) raise diffsync_exceptions.ObjectNotCreated + except MultipleObjectsReturned: + diffsync.job.logger.error( + f"Failed to assign vrf to {interface.device}:{interface}, there are multipple vrfs with attributes " + f"[name: {attrs['vrf']['name']}" + f"namespace: {diffsync.job.namespace}]. " + "Unsure which to assign." + ) + raise diffsync_exceptions.ObjectNotCreated try: vrf.devices.add(interface.device) vrf.validated_save() @@ -505,13 +511,13 @@ def _get_and_assign_vrf(cls, diffsync, attrs, interface): @classmethod def create(cls, diffsync, ids, attrs): - """Assign an untagged vlan to an interface.""" + """Assign a vrf to an interface.""" if attrs.get("vrf"): try: interface = Interface.objects.get(device__name=ids["device__name"], name=ids["name"]) except ObjectDoesNotExist: diffsync.job.logger.error( - f"Failed to assign vrf {attrs['untagged_vlan']}. An interface with attributes: " + f"Failed to assign vrf {attrs['vrf']}. An interface with attributes: " f"[device__name: {ids['device__name']} name: {ids['name']}] was not found." 
) raise diffsync_exceptions.ObjectNotCreated From 167e58e8c6fa99c4fc43cef7ecad5bc3a4f620c8 Mon Sep 17 00:00:00 2001 From: David Cates Date: Thu, 18 Apr 2024 09:24:32 -0700 Subject: [PATCH 218/225] update mock data --- nautobot_device_onboarding/diffsync/mock_data.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/nautobot_device_onboarding/diffsync/mock_data.py b/nautobot_device_onboarding/diffsync/mock_data.py index 5f1aa100..418b8bdd 100644 --- a/nautobot_device_onboarding/diffsync/mock_data.py +++ b/nautobot_device_onboarding/diffsync/mock_data.py @@ -20,7 +20,7 @@ "lag": "", "untagged_vlan": {"name": "vlan60", "id": "60"}, "tagged_vlans": [{"name": "vlan40", "id": "40"}], - "vrf": {"name": "vrf1", "rd": "65000:1"}, + "vrf": {"name": "vrf1"}, } }, { @@ -55,7 +55,7 @@ "lag": "Po1", "untagged_vlan": {}, "tagged_vlans": [{"name": "vlan40", "id": "40"}, {"name": "vlan50", "id": "50"}], - "vrf": {"name": "mgmt", "rd": "65500:1"}, + "vrf": {"name": "mgmt"}, } }, { @@ -86,7 +86,7 @@ "lag": "", "untagged_vlan": {}, "tagged_vlans": [], - "vrf": {"name": "mgmt", "rd": "65500:1"}, + "vrf": {"name": "mgmt"}, } }, { @@ -101,7 +101,7 @@ "lag": "", "untagged_vlan": "", "tagged_vlans": [], - "vrf": {"name": "mgmt", "rd": "65500:1"}, + "vrf": {"name": "mgmt"}, } }, ], @@ -123,7 +123,7 @@ "lag": "", "untagged_vlan": {"name": "vlan60", "id": "60"}, "tagged_vlans": [{"name": "vlan40", "id": "40"}], - "vrf": {"name": "mgmt", "rd": "65500:1"}, + "vrf": {"name": "mgmt"}, } }, { @@ -158,7 +158,7 @@ "lag": "Po1", "untagged_vlan": {}, "tagged_vlans": [{"name": "vlan40", "id": "40"}, {"name": "vlan50", "id": "50"}], - "vrf": {"name": "mgmt", "rd": "65500:1"}, + "vrf": {"name": "mgmt"}, } }, { @@ -173,7 +173,7 @@ "lag": "", "untagged_vlan": {}, "tagged_vlans": [], - "vrf": {"name": "mgmt", "rd": "65500:1"}, + "vrf": {"name": "mgmt"}, } }, ], From 4829af57c60d3f9f8fb6c58e4100c35d2c8f7924 Mon Sep 17 00:00:00 2001 From: Susan Hooks Date: Thu, 18 Apr 2024 
17:37:22 +0000 Subject: [PATCH 219/225] removed rd from vrf --- .../command_mappers/cisco_ios.yml | 2 +- nautobot_device_onboarding/nornir_plays/formatter.py | 10 +--------- 2 files changed, 2 insertions(+), 10 deletions(-) diff --git a/nautobot_device_onboarding/command_mappers/cisco_ios.yml b/nautobot_device_onboarding/command_mappers/cisco_ios.yml index 69ead20c..8feb464f 100755 --- a/nautobot_device_onboarding/command_mappers/cisco_ios.yml +++ b/nautobot_device_onboarding/command_mappers/cisco_ios.yml @@ -73,7 +73,7 @@ network_importer: commands: - command: "show vrf" use_textfsm: true - jpath: "[*].{name: name, default_rd: default_rd, interfaces: interfaces}" + jpath: "[*].{name: name, interfaces: interfaces}" vlans: commands: - command: "show vlan" diff --git a/nautobot_device_onboarding/nornir_plays/formatter.py b/nautobot_device_onboarding/nornir_plays/formatter.py index 9ad0b1f2..c66f98d1 100755 --- a/nautobot_device_onboarding/nornir_plays/formatter.py +++ b/nautobot_device_onboarding/nornir_plays/formatter.py @@ -272,12 +272,10 @@ def format_ios_results(device): if vrf["default_rd"] == "": interface_dict[canonical_name]["vrf"] = { "name": vrf["name"], - "rd": "", } else: interface_dict[canonical_name]["vrf"] = { "name": vrf["name"], - "rd": vrf["default_rd"], } except KeyError: print(f"Error: VRF configuration on interface {interface} not as expected.") @@ -463,13 +461,7 @@ def format_nxos_results(device): if canonical_name.startswith("VLAN"): canonical_name = canonical_name.replace("VLAN", "Vlan", 1) interface_dict.setdefault(canonical_name, {}) - if vrf["default_rd"] == "0:0": - interface_dict[canonical_name]["vrf"] = { - "name": vrf["name"], - "rd": "", - } - else: - interface_dict[canonical_name]["vrf"] = {"name": vrf["name"], "rd": vrf["default_rd"]} + interface_dict[canonical_name]["vrf"] = {"name": vrf["name"]} except KeyError: print(f"Error: VRF configuration on interface {interface} not as expected.") continue From 
217e26b9a77e285e4ff3ad246a89c7c77a729d97 Mon Sep 17 00:00:00 2001 From: Jeff Kala Date: Thu, 18 Apr 2024 13:15:00 -0500 Subject: [PATCH 220/225] add wlc mapper and support for do --- .../command_mappers/cisco_wlc_ssh.yml | 28 +++++++++++++++++++ nautobot_device_onboarding/constants.py | 1 + 2 files changed, 29 insertions(+) create mode 100755 nautobot_device_onboarding/command_mappers/cisco_wlc_ssh.yml diff --git a/nautobot_device_onboarding/command_mappers/cisco_wlc_ssh.yml b/nautobot_device_onboarding/command_mappers/cisco_wlc_ssh.yml new file mode 100755 index 00000000..2cfd8f6d --- /dev/null +++ b/nautobot_device_onboarding/command_mappers/cisco_wlc_ssh.yml @@ -0,0 +1,28 @@ +--- +device_onboarding: + hostname: + commands: + - command: "show sysinfo" + use_textfsm: true + jpath: "[*].system_name" + serial: + commands: + - command: "show inventory" + use_textfsm: true + jpath: "[*].sn" + device_type: + commands: + - command: "show inventory" + use_textfsm: true + jpath: "[*].pid" + mgmt_interface: + commands: + - command: "show interface detailed management" + use_textfsm: true + jpath: "[*].interface_name" + mask_length: + commands: + - command: "show interface detailed management" + use_textfsm: true + jpath: "[*].netmask" + post_processor: "{{ obj[0] | netmask_to_cidr }}" diff --git a/nautobot_device_onboarding/constants.py b/nautobot_device_onboarding/constants.py index cd5eb4ed..9a90c2f6 100644 --- a/nautobot_device_onboarding/constants.py +++ b/nautobot_device_onboarding/constants.py @@ -15,6 +15,7 @@ "juniper_junos": "junos", "cisco_xr": "iosxr", "cisco_wlc": "cisco_wlc", + "cisco_wlc_ssh": "cisco_wlc_ssh", } # This should potentially be removed and used nautobot core directly choices. 
From 5c5c4d6a2905a8bab2a9d000ecc19e49654ba701 Mon Sep 17 00:00:00 2001 From: David Cates Date: Thu, 18 Apr 2024 11:30:53 -0700 Subject: [PATCH 221/225] update jinja_filters.py, lock and bump version --- nautobot_device_onboarding/jinja_filters.py | 23 +- poetry.lock | 635 ++++++++++---------- pyproject.toml | 2 +- 3 files changed, 329 insertions(+), 331 deletions(-) diff --git a/nautobot_device_onboarding/jinja_filters.py b/nautobot_device_onboarding/jinja_filters.py index 93c61de2..010dd760 100755 --- a/nautobot_device_onboarding/jinja_filters.py +++ b/nautobot_device_onboarding/jinja_filters.py @@ -79,16 +79,15 @@ def merge_dicts(*dicts): return {} # Empty input returns an empty dictionary merged = dicts[0].copy() for other_dict in dicts[1:]: - if not other_dict: - continue # Skip empty dictionaries - for key, value in other_dict.items(): - if key in merged: - if isinstance(value, dict) and isinstance(merged[key], dict): - # Recursively merge nested dictionaries - merged[key] = merge_dicts(merged[key], value) - else: - # Overwrite existing values with values from subsequent dictionaries (giving priority to later ones) - merged[key] = value - # Add new key-value pairs from subsequent dictionaries - merged[key] = value + if other_dict: + for key, value in other_dict.items(): + if key in merged: + if isinstance(value, dict) and isinstance(merged[key], dict): + # Recursively merge nested dictionaries + merged[key] = merge_dicts(merged[key], value) + else: + # Overwrite existing values with values from subsequent dictionaries (giving priority to later ones) + merged[key] = value + # Add new key-value pairs from subsequent dictionaries + # merged[key] = value return merged diff --git a/poetry.lock b/poetry.lock index baca4e6d..36f8c728 100755 --- a/poetry.lock +++ b/poetry.lock @@ -286,33 +286,33 @@ files = [ [[package]] name = "black" -version = "24.3.0" +version = "24.4.0" description = "The uncompromising code formatter." 
optional = false python-versions = ">=3.8" files = [ - {file = "black-24.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7d5e026f8da0322b5662fa7a8e752b3fa2dac1c1cbc213c3d7ff9bdd0ab12395"}, - {file = "black-24.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9f50ea1132e2189d8dff0115ab75b65590a3e97de1e143795adb4ce317934995"}, - {file = "black-24.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2af80566f43c85f5797365077fb64a393861a3730bd110971ab7a0c94e873e7"}, - {file = "black-24.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:4be5bb28e090456adfc1255e03967fb67ca846a03be7aadf6249096100ee32d0"}, - {file = "black-24.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4f1373a7808a8f135b774039f61d59e4be7eb56b2513d3d2f02a8b9365b8a8a9"}, - {file = "black-24.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:aadf7a02d947936ee418777e0247ea114f78aff0d0959461057cae8a04f20597"}, - {file = "black-24.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65c02e4ea2ae09d16314d30912a58ada9a5c4fdfedf9512d23326128ac08ac3d"}, - {file = "black-24.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:bf21b7b230718a5f08bd32d5e4f1db7fc8788345c8aea1d155fc17852b3410f5"}, - {file = "black-24.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:2818cf72dfd5d289e48f37ccfa08b460bf469e67fb7c4abb07edc2e9f16fb63f"}, - {file = "black-24.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4acf672def7eb1725f41f38bf6bf425c8237248bb0804faa3965c036f7672d11"}, - {file = "black-24.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c7ed6668cbbfcd231fa0dc1b137d3e40c04c7f786e626b405c62bcd5db5857e4"}, - {file = "black-24.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:56f52cfbd3dabe2798d76dbdd299faa046a901041faf2cf33288bc4e6dae57b5"}, - {file = "black-24.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:79dcf34b33e38ed1b17434693763301d7ccbd1c5860674a8f871bd15139e7837"}, - {file = 
"black-24.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e19cb1c6365fd6dc38a6eae2dcb691d7d83935c10215aef8e6c38edee3f77abd"}, - {file = "black-24.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65b76c275e4c1c5ce6e9870911384bff5ca31ab63d19c76811cb1fb162678213"}, - {file = "black-24.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:b5991d523eee14756f3c8d5df5231550ae8993e2286b8014e2fdea7156ed0959"}, - {file = "black-24.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c45f8dff244b3c431b36e3224b6be4a127c6aca780853574c00faf99258041eb"}, - {file = "black-24.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6905238a754ceb7788a73f02b45637d820b2f5478b20fec82ea865e4f5d4d9f7"}, - {file = "black-24.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7de8d330763c66663661a1ffd432274a2f92f07feeddd89ffd085b5744f85e7"}, - {file = "black-24.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:7bb041dca0d784697af4646d3b62ba4a6b028276ae878e53f6b4f74ddd6db99f"}, - {file = "black-24.3.0-py3-none-any.whl", hash = "sha256:41622020d7120e01d377f74249e677039d20e6344ff5851de8a10f11f513bf93"}, - {file = "black-24.3.0.tar.gz", hash = "sha256:a0c9c4a0771afc6919578cec71ce82a3e31e054904e7197deacbc9382671c41f"}, + {file = "black-24.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6ad001a9ddd9b8dfd1b434d566be39b1cd502802c8d38bbb1ba612afda2ef436"}, + {file = "black-24.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e3a3a092b8b756c643fe45f4624dbd5a389f770a4ac294cf4d0fce6af86addaf"}, + {file = "black-24.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dae79397f367ac8d7adb6c779813328f6d690943f64b32983e896bcccd18cbad"}, + {file = "black-24.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:71d998b73c957444fb7c52096c3843875f4b6b47a54972598741fe9a7f737fcb"}, + {file = "black-24.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8e5537f456a22cf5cfcb2707803431d2feeb82ab3748ade280d6ccd0b40ed2e8"}, + {file = 
"black-24.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:64e60a7edd71fd542a10a9643bf369bfd2644de95ec71e86790b063aa02ff745"}, + {file = "black-24.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5cd5b4f76056cecce3e69b0d4c228326d2595f506797f40b9233424e2524c070"}, + {file = "black-24.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:64578cf99b6b46a6301bc28bdb89f9d6f9b592b1c5837818a177c98525dbe397"}, + {file = "black-24.4.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f95cece33329dc4aa3b0e1a771c41075812e46cf3d6e3f1dfe3d91ff09826ed2"}, + {file = "black-24.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4396ca365a4310beef84d446ca5016f671b10f07abdba3e4e4304218d2c71d33"}, + {file = "black-24.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44d99dfdf37a2a00a6f7a8dcbd19edf361d056ee51093b2445de7ca09adac965"}, + {file = "black-24.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:21f9407063ec71c5580b8ad975653c66508d6a9f57bd008bb8691d273705adcd"}, + {file = "black-24.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:652e55bb722ca026299eb74e53880ee2315b181dfdd44dca98e43448620ddec1"}, + {file = "black-24.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7f2966b9b2b3b7104fca9d75b2ee856fe3fdd7ed9e47c753a4bb1a675f2caab8"}, + {file = "black-24.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1bb9ca06e556a09f7f7177bc7cb604e5ed2d2df1e9119e4f7d2f1f7071c32e5d"}, + {file = "black-24.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:d4e71cdebdc8efeb6deaf5f2deb28325f8614d48426bed118ecc2dcaefb9ebf3"}, + {file = "black-24.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6644f97a7ef6f401a150cca551a1ff97e03c25d8519ee0bbc9b0058772882665"}, + {file = "black-24.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:75a2d0b4f5eb81f7eebc31f788f9830a6ce10a68c91fbe0fade34fff7a2836e6"}, + {file = "black-24.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:eb949f56a63c5e134dfdca12091e98ffb5fd446293ebae123d10fc1abad00b9e"}, + {file = "black-24.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:7852b05d02b5b9a8c893ab95863ef8986e4dda29af80bbbda94d7aee1abf8702"}, + {file = "black-24.4.0-py3-none-any.whl", hash = "sha256:74eb9b5420e26b42c00a3ff470dc0cd144b80a766128b1771d07643165e08d0e"}, + {file = "black-24.4.0.tar.gz", hash = "sha256:f07b69fda20578367eaebbd670ff8fc653ab181e1ff95d84497f9fa20e7d0641"}, ] [package.dependencies] @@ -892,37 +892,37 @@ files = [ [[package]] name = "django-celery-beat" -version = "2.5.0" +version = "2.6.0" description = "Database-backed Periodic Tasks." optional = false python-versions = "*" files = [ - {file = "django-celery-beat-2.5.0.tar.gz", hash = "sha256:cd0a47f5958402f51ac0c715bc942ae33d7b50b4e48cba91bc3f2712be505df1"}, - {file = "django_celery_beat-2.5.0-py3-none-any.whl", hash = "sha256:ae460faa5ea142fba0875409095d22f6bd7bcc7377889b85e8cab5c0dfb781fe"}, + {file = "django-celery-beat-2.6.0.tar.gz", hash = "sha256:f75b2d129731f1214be8383e18fae6bfeacdb55dffb2116ce849222c0106f9ad"}, ] [package.dependencies] "backports.zoneinfo" = {version = "*", markers = "python_version < \"3.9\""} celery = ">=5.2.3,<6.0" cron-descriptor = ">=1.2.32" -Django = ">=2.2,<5.0" +Django = ">=2.2,<5.1" django-timezone-field = ">=5.0" python-crontab = ">=2.3.4" tzdata = "*" [[package]] name = "django-celery-results" -version = "2.4.0" +version = "2.5.1" description = "Celery result backends for Django." 
optional = false python-versions = "*" files = [ - {file = "django_celery_results-2.4.0-py3-none-any.whl", hash = "sha256:be91307c02fbbf0dda21993c3001c60edb74595444ccd6ad696552fe3689e85b"}, - {file = "django_celery_results-2.4.0.tar.gz", hash = "sha256:75aa51970db5691cbf242c6a0ff50c8cdf419e265cd0e9b772335d06436c4b99"}, + {file = "django_celery_results-2.5.1-py3-none-any.whl", hash = "sha256:0da4cd5ecc049333e4524a23fcfc3460dfae91aa0a60f1fae4b6b2889c254e01"}, + {file = "django_celery_results-2.5.1.tar.gz", hash = "sha256:3ecb7147f773f34d0381bac6246337ce4cf88a2ea7b82774ed48e518b67bb8fd"}, ] [package.dependencies] -celery = ">=5.2.3,<6.0" +celery = ">=5.2.7,<6.0" +Django = ">=3.2.18" [[package]] name = "django-constance" @@ -944,16 +944,17 @@ redis = ["redis"] [[package]] name = "django-cors-headers" -version = "4.2.0" +version = "4.3.1" description = "django-cors-headers is a Django application for handling the server headers required for Cross-Origin Resource Sharing (CORS)." optional = false python-versions = ">=3.8" files = [ - {file = "django_cors_headers-4.2.0-py3-none-any.whl", hash = "sha256:9ada212b0e2efd4a5e339360ffc869cb21ac5605e810afe69f7308e577ea5bde"}, - {file = "django_cors_headers-4.2.0.tar.gz", hash = "sha256:f9749c6410fe738278bc2b6ef17f05195bc7b251693c035752d8257026af024f"}, + {file = "django-cors-headers-4.3.1.tar.gz", hash = "sha256:0bf65ef45e606aff1994d35503e6b677c0b26cafff6506f8fd7187f3be840207"}, + {file = "django_cors_headers-4.3.1-py3-none-any.whl", hash = "sha256:0b1fd19297e37417fc9f835d39e45c8c642938ddba1acce0c1753d3edef04f36"}, ] [package.dependencies] +asgiref = ">=3.6" Django = ">=3.2" [[package]] @@ -1001,13 +1002,13 @@ Django = ">=3.2" [[package]] name = "django-filter" -version = "23.1" +version = "23.5" description = "Django-filter is a reusable Django application for allowing users to filter querysets dynamically." 
optional = false python-versions = ">=3.7" files = [ - {file = "django-filter-23.1.tar.gz", hash = "sha256:dee5dcf2cea4d7f767e271b6d01f767fce7500676d5e5dc58dac8154000b87df"}, - {file = "django_filter-23.1-py3-none-any.whl", hash = "sha256:e3c52ad83c32fb5882125105efb5fea2a1d6a85e7dc64b04ef52edbf14451b6c"}, + {file = "django-filter-23.5.tar.gz", hash = "sha256:67583aa43b91fe8c49f74a832d95f4d8442be628fd4c6d65e9f811f5153a4e5c"}, + {file = "django_filter-23.5-py3-none-any.whl", hash = "sha256:99122a201d83860aef4fe77758b69dda913e874cc5e0eaa50a86b0b18d708400"}, ] [package.dependencies] @@ -1015,13 +1016,13 @@ Django = ">=3.2" [[package]] name = "django-health-check" -version = "3.17.0" +version = "3.18.1" description = "Run checks on services like databases, queue servers, celery processes, etc." optional = false python-versions = ">=3.8" files = [ - {file = "django-health-check-3.17.0.tar.gz", hash = "sha256:d1b8671e79d1de6e3dd1a9c69566222b0bfcfacca8b90511a4407b2d0d3d2778"}, - {file = "django_health_check-3.17.0-py2.py3-none-any.whl", hash = "sha256:20dc5ccb516a4e7163593fd4026f0a7531e3027b47d23ebe3bd9dbc99ac4354c"}, + {file = "django-health-check-3.18.1.tar.gz", hash = "sha256:44552d55ae8950c9548d3b90f9d9fd5570b57446a19b2a8e674c82f993cb7a2c"}, + {file = "django_health_check-3.18.1-py2.py3-none-any.whl", hash = "sha256:2c89a326cd79830e2fc6808823a9e7e874ab23f7aef3ff2c4d1194c998e1dca1"}, ] [package.dependencies] @@ -1033,17 +1034,17 @@ test = ["celery", "pytest", "pytest-cov", "pytest-django", "redis"] [[package]] name = "django-jinja" -version = "2.10.2" +version = "2.11.0" description = "Jinja2 templating language integrated in Django." 
optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "django-jinja-2.10.2.tar.gz", hash = "sha256:bfdfbb55c1f5a679d69ad575d550c4707d386634009152efe014089f3c4d1412"}, - {file = "django_jinja-2.10.2-py3-none-any.whl", hash = "sha256:dd003ec1c95c0989eb28a538831bced62b1b61da551cb44a5dfd708fcf75589f"}, + {file = "django-jinja-2.11.0.tar.gz", hash = "sha256:47c06d3271e6b2f27d3596278af517bfe2e19c1eb36ae1c0b1cc302d7f0259af"}, + {file = "django_jinja-2.11.0-py3-none-any.whl", hash = "sha256:cc4c72246a6e346aa0574e0c56c3e534c1a20ef47b8476f05d7287781f69a0a9"}, ] [package.dependencies] -django = ">=2.2" +django = ">=3.2" jinja2 = ">=3" [[package]] @@ -1079,13 +1080,13 @@ prometheus-client = ">=0.7" [[package]] name = "django-redis" -version = "5.3.0" +version = "5.4.0" description = "Full featured redis cache backend for Django." optional = false python-versions = ">=3.6" files = [ - {file = "django-redis-5.3.0.tar.gz", hash = "sha256:8bc5793ec06b28ea802aad85ec437e7646511d4e571e07ccad19cfed8b9ddd44"}, - {file = "django_redis-5.3.0-py3-none-any.whl", hash = "sha256:2d8660d39f586c41c9907d5395693c477434141690fd7eca9d32376af00b0aac"}, + {file = "django-redis-5.4.0.tar.gz", hash = "sha256:6a02abaa34b0fea8bf9b707d2c363ab6adc7409950b2db93602e6cb292818c42"}, + {file = "django_redis-5.4.0-py3-none-any.whl", hash = "sha256:ebc88df7da810732e2af9987f7f426c96204bf89319df4c6da6ca9a2942edd5b"}, ] [package.dependencies] @@ -1114,13 +1115,13 @@ sqlparse = "*" [[package]] name = "django-tables2" -version = "2.6.0" +version = "2.7.0" description = "Table/data-grid framework for Django" optional = false python-versions = "*" files = [ - {file = "django-tables2-2.6.0.tar.gz", hash = "sha256:479eed04007cc04bcf764a6fb7a5e3955d94b878ba7f3a4bd4edbd2f7769e08d"}, - {file = "django_tables2-2.6.0-py2.py3-none-any.whl", hash = "sha256:04f23c1181d93716c67085a3c324b449180fd0c5162ef4619acb0b2d9a166133"}, + {file = "django-tables2-2.7.0.tar.gz", hash = 
"sha256:4113fcc575eb438a12e83a4d4ea01452e4800d970e8bdd0e4122ac171af1900d"}, + {file = "django_tables2-2.7.0-py2.py3-none-any.whl", hash = "sha256:99e06d966ca8ac69fd74092eb45c79a280dd5ca0ccb81395d96261f62128e1af"}, ] [package.dependencies] @@ -1161,13 +1162,13 @@ pytz = "*" [[package]] name = "django-tree-queries" -version = "0.16.1" +version = "0.17.0" description = "Tree queries with explicit opt-in, without configurability" optional = false python-versions = ">=3.8" files = [ - {file = "django_tree_queries-0.16.1-py3-none-any.whl", hash = "sha256:b57cebd85136897dc2d7d1da50f3944b13d4713009af655ae221c8202146c2f5"}, - {file = "django_tree_queries-0.16.1.tar.gz", hash = "sha256:5a7765bdbc78742ae7b206348aa674a7e39ef38069ac3854a51b330d25081c43"}, + {file = "django_tree_queries-0.17.0-py3-none-any.whl", hash = "sha256:df62cc7daa7a766483a8ae11618ff7649d74425b5d245e9644526f2dd2f51af0"}, + {file = "django_tree_queries-0.17.0.tar.gz", hash = "sha256:f115cf6756c55fde56bb876d5b5aa1b2bd33ae3d6e2949c3176ef0b4fb64c532"}, ] [package.extras] @@ -1196,18 +1197,18 @@ waitress = ["waitress"] [[package]] name = "djangorestframework" -version = "3.14.0" +version = "3.15.1" description = "Web APIs for Django, made easy." 
optional = false python-versions = ">=3.6" files = [ - {file = "djangorestframework-3.14.0-py3-none-any.whl", hash = "sha256:eb63f58c9f218e1a7d064d17a70751f528ed4e1d35547fdade9aaf4cd103fd08"}, - {file = "djangorestframework-3.14.0.tar.gz", hash = "sha256:579a333e6256b09489cbe0a067e66abe55c6595d8926be6b99423786334350c8"}, + {file = "djangorestframework-3.15.1-py3-none-any.whl", hash = "sha256:3ccc0475bce968608cf30d07fb17d8e52d1d7fc8bfe779c905463200750cbca6"}, + {file = "djangorestframework-3.15.1.tar.gz", hash = "sha256:f88fad74183dfc7144b2756d0d2ac716ea5b4c7c9840995ac3bfd8ec034333c1"}, ] [package.dependencies] +"backports.zoneinfo" = {version = "*", markers = "python_version < \"3.9\""} django = ">=3.0" -pytz = "*" [[package]] name = "drf-react-template-framework" @@ -1225,13 +1226,13 @@ djangorestframework = ">=3.12.0,<4.0.0" [[package]] name = "drf-spectacular" -version = "0.26.3" +version = "0.26.5" description = "Sane and flexible OpenAPI 3 schema generation for Django REST framework" optional = false python-versions = ">=3.6" files = [ - {file = "drf-spectacular-0.26.3.tar.gz", hash = "sha256:b907a72a0244e5dcfeca625e9632cd8ebccdbe2cb528b7c1de1191708be6f31e"}, - {file = "drf_spectacular-0.26.3-py3-none-any.whl", hash = "sha256:1d84ac70522baaadd6d84a25ce5fe5ea50cfcba0387856689f98ac536f14aa32"}, + {file = "drf-spectacular-0.26.5.tar.gz", hash = "sha256:aee55330a774ba8a9cbdb125714d1c9ee05a8aafd3ce3be8bfd26527649aeb44"}, + {file = "drf_spectacular-0.26.5-py3-none-any.whl", hash = "sha256:c0002a820b11771fdbf37853deb371947caf0159d1afeeffe7598e964bc1db94"}, ] [package.dependencies] @@ -1263,13 +1264,13 @@ Django = ">=2.2" [[package]] name = "emoji" -version = "2.8.0" +version = "2.11.0" description = "Emoji for Python" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" files = [ - {file = "emoji-2.8.0-py2.py3-none-any.whl", hash = 
"sha256:a8468fd836b7ecb6d1eac054c9a591701ce0ccd6c6f7779ad71b66f76664df90"}, - {file = "emoji-2.8.0.tar.gz", hash = "sha256:8d8b5dec3c507444b58890e598fc895fcec022b3f5acb49497c6ccc5208b8b00"}, + {file = "emoji-2.11.0-py2.py3-none-any.whl", hash = "sha256:63fc9107f06c6c2e48e5078ce9575cef98518f5ac09474f6148a43e989989582"}, + {file = "emoji-2.11.0.tar.gz", hash = "sha256:772eaa30f4e0b1ce95148a092df4c7dc97644532c03225326b0fd05e8a9f72a3"}, ] [package.extras] @@ -1484,13 +1485,13 @@ six = ">=1.12" [[package]] name = "griffe" -version = "0.42.1" +version = "0.43.0" description = "Signatures for entire Python programs. Extract the structure, the frame, the skeleton of your project, to generate API documentation or find breaking changes in your API." optional = false python-versions = ">=3.8" files = [ - {file = "griffe-0.42.1-py3-none-any.whl", hash = "sha256:7e805e35617601355edcac0d3511cedc1ed0cb1f7645e2d336ae4b05bbae7b3b"}, - {file = "griffe-0.42.1.tar.gz", hash = "sha256:57046131384043ed078692b85d86b76568a686266cc036b9b56b704466f803ce"}, + {file = "griffe-0.43.0-py3-none-any.whl", hash = "sha256:5966a0d0d6b9189945765903703fb9fb8f7818640facd255c2a0867662137619"}, + {file = "griffe-0.43.0.tar.gz", hash = "sha256:b8c0a938db1d74840c6a76da5b3d8754ea680c6f43efab33958e28b95d3031d3"}, ] [package.dependencies] @@ -1555,13 +1556,13 @@ socks = ["socksio (==1.*)"] [[package]] name = "idna" -version = "3.6" +version = "3.7" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.5" files = [ - {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, - {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, + {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, + {file = "idna-3.7.tar.gz", hash = 
"sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, ] [[package]] @@ -1755,13 +1756,13 @@ files = [ [[package]] name = "jsonschema" -version = "4.18.6" +version = "4.21.1" description = "An implementation of JSON Schema validation for Python" optional = false python-versions = ">=3.8" files = [ - {file = "jsonschema-4.18.6-py3-none-any.whl", hash = "sha256:dc274409c36175aad949c68e5ead0853aaffbe8e88c830ae66bb3c7a1728ad2d"}, - {file = "jsonschema-4.18.6.tar.gz", hash = "sha256:ce71d2f8c7983ef75a756e568317bf54bc531dc3ad7e66a128eae0d51623d8a3"}, + {file = "jsonschema-4.21.1-py3-none-any.whl", hash = "sha256:7996507afae316306f9e2290407761157c6f78002dcf7419acb99822143d1c6f"}, + {file = "jsonschema-4.21.1.tar.gz", hash = "sha256:85727c00279f5fa6bedbe6238d2aa6403bedd8b4864ab11207d07df3cc1b2ee5"}, ] [package.dependencies] @@ -1793,20 +1794,19 @@ referencing = ">=0.31.0" [[package]] name = "junos-eznc" -version = "2.7.0" +version = "2.7.1" description = "Junos 'EZ' automation for non-programmers" optional = false python-versions = ">=3.8" files = [ - {file = "junos-eznc-2.7.0.tar.gz", hash = "sha256:a45c90641d24ff4c86796418ea76ca64066c06d0bf644d6b77e605bf957c5c7d"}, - {file = "junos_eznc-2.7.0-py2.py3-none-any.whl", hash = "sha256:27a665957b49cf4caec2047e33b1b62f3a3ece72a244d0b98e93df9c26c984a6"}, + {file = "junos-eznc-2.7.1.tar.gz", hash = "sha256:371f0298bf03e0cb4c017c43f6f4122263584eda0d690d0112e93f13daae41ac"}, + {file = "junos_eznc-2.7.1-py3-none-any.whl", hash = "sha256:8a7918faa8f0570341cac64c1210c1cd3e3542162d1e7449c3364f8d805716b2"}, ] [package.dependencies] jinja2 = ">=2.7.1" lxml = ">=3.2.4" ncclient = ">=0.6.15" -paramiko = ">=1.15.2" pyparsing = "*" pyserial = "*" PyYAML = ">=5.1" @@ -1817,13 +1817,13 @@ yamlordereddictloader = "*" [[package]] name = "kombu" -version = "5.3.6" +version = "5.3.7" description = "Messaging library for Python." 
optional = false python-versions = ">=3.8" files = [ - {file = "kombu-5.3.6-py3-none-any.whl", hash = "sha256:49f1e62b12369045de2662f62cc584e7df83481a513db83b01f87b5b9785e378"}, - {file = "kombu-5.3.6.tar.gz", hash = "sha256:f3da5b570a147a5da8280180aa80b03807283d63ea5081fcdb510d18242431d9"}, + {file = "kombu-5.3.7-py3-none-any.whl", hash = "sha256:5634c511926309c7f9789f1433e9ed402616b56836ef9878f01bd59267b4c7a9"}, + {file = "kombu-5.3.7.tar.gz", hash = "sha256:011c4cd9a355c14a1de8d35d257314a1d2456d52b7140388561acac3cf1a97bf"}, ] [package.dependencies] @@ -2133,13 +2133,13 @@ files = [ [[package]] name = "matplotlib-inline" -version = "0.1.6" +version = "0.1.7" description = "Inline Matplotlib backend for Jupyter" optional = false -python-versions = ">=3.5" +python-versions = ">=3.8" files = [ - {file = "matplotlib-inline-0.1.6.tar.gz", hash = "sha256:f887e5f10ba98e8d2b150ddcf4702c1e5f8b3a20005eb0f74bfdbd360ee6f304"}, - {file = "matplotlib_inline-0.1.6-py3-none-any.whl", hash = "sha256:f1f41aab5328aa5aaea9b16d083b128102f8712542f819fe7e6a420ff581b311"}, + {file = "matplotlib_inline-0.1.7-py3-none-any.whl", hash = "sha256:df192d39a4ff8f21b1895d72e6a13f5fcc5099f00fa84384e0ea28c2cc0653ca"}, + {file = "matplotlib_inline-0.1.7.tar.gz", hash = "sha256:8423b23ec666be3d16e16b60bdd8ac4e86e840ebd1dd11a30b9f117f2fa0ab90"}, ] [package.dependencies] @@ -2355,66 +2355,66 @@ typing-extensions = ">=4.3.0" [[package]] name = "nautobot" -version = "2.2.0" +version = "2.2.1" description = "Source of truth and network automation platform." 
optional = false python-versions = "<3.12,>=3.8" files = [ - {file = "nautobot-2.2.0-py3-none-any.whl", hash = "sha256:918881373371661ee4fefb3177b8f28a86068164085b1383cc84966f913eca46"}, - {file = "nautobot-2.2.0.tar.gz", hash = "sha256:2232f8296d0b78885e02ab055d0b15e3a6303f633a0d0952c84c76f5978f9b4f"}, + {file = "nautobot-2.2.1-py3-none-any.whl", hash = "sha256:c65bfa5ef5abc32d70f7dbea0f821f8f865ede0135cb36398029c4b96ed42875"}, + {file = "nautobot-2.2.1.tar.gz", hash = "sha256:bbf9f6cffe0f4aa064b5e0a470028c8bffe6be0605dba49be70a849a2ed47d57"}, ] [package.dependencies] celery = ">=5.3.1,<5.4.0" Django = ">=3.2.25,<3.3.0" django-ajax-tables = ">=1.1.1,<1.2.0" -django-celery-beat = ">=2.5.0,<2.6.0" -django-celery-results = ">=2.4.0,<2.5.0" +django-celery-beat = ">=2.6.0,<2.7.0" +django-celery-results = ">=2.5.1,<2.6.0" django-constance = {version = ">=2.9.1,<2.10.0", extras = ["database"]} -django-cors-headers = ">=4.2.0,<4.3.0" +django-cors-headers = ">=4.3.1,<4.4.0" django-db-file-storage = ">=0.5.5,<0.6.0" django-extensions = ">=3.2.3,<3.3.0" -django-filter = ">=23.1,<23.2" -django-health-check = ">=3.17.0,<3.18.0" -django-jinja = ">=2.10.2,<2.11.0" +django-filter = ">=23.5,<23.6" +django-health-check = ">=3.18.1,<3.19.0" +django-jinja = ">=2.11.0,<2.12.0" django-prometheus = ">=2.3.1,<2.4.0" -django-redis = ">=5.3.0,<5.4.0" +django-redis = ">=5.4.0,<5.5.0" django-silk = ">=5.1.0,<5.2.0" -django-tables2 = ">=2.6.0,<2.7.0" +django-tables2 = ">=2.7.0,<2.8.0" django-taggit = ">=4.0.0,<4.1.0" django-timezone-field = ">=5.1,<5.2" -django-tree-queries = ">=0.16.1,<0.17.0" +django-tree-queries = ">=0.17.0,<0.18.0" django-webserver = ">=1.2.0,<1.3.0" -djangorestframework = ">=3.14.0,<3.15.0" +djangorestframework = ">=3.15.1,<3.16.0" drf-react-template-framework = ">=0.0.17,<0.0.18" -drf-spectacular = {version = "0.26.3", extras = ["sidecar"]} -emoji = ">=2.8.0,<2.9.0" -GitPython = ">=3.1.41,<3.2.0" +drf-spectacular = {version = ">=0.26.5,<0.27.0", extras = ["sidecar"]} 
+emoji = ">=2.11.0,<2.12.0" +GitPython = ">=3.1.43,<3.2.0" graphene-django = ">=2.16.0,<2.17.0" graphene-django-optimizer = ">=0.8.0,<0.9.0" Jinja2 = ">=3.1.3,<3.2.0" -jsonschema = ">=4.7.0,<4.19.0" -Markdown = ">=3.3.7,<3.6.0" +jsonschema = ">=4.7.0,<5.0.0" +Markdown = ">=3.5.2,<3.6.0" MarkupSafe = ">=2.1.5,<2.2.0" -netaddr = ">=0.8.0,<0.9.0" +netaddr = ">=0.10.1,<0.11.0" netutils = ">=1.6.0,<2.0.0" nh3 = ">=0.2.15,<0.3.0" packaging = ">=23.1" -Pillow = ">=10.2.0,<10.3.0" -prometheus-client = ">=0.17.1,<0.18.0" +Pillow = ">=10.3.0,<10.4.0" +prometheus-client = ">=0.20.0,<0.21.0" psycopg2-binary = ">=2.9.9,<2.10.0" python-slugify = ">=8.0.3,<8.1.0" pyuwsgi = ">=2.0.23,<2.1.0" PyYAML = ">=6.0,<6.1" -social-auth-app-django = ">=5.2.0,<5.3.0" +social-auth-app-django = ">=5.4.0,<5.5.0" svgwrite = ">=1.4.2,<1.5.0" [package.extras] -all = ["django-auth-ldap (>=4.3.0,<4.4.0)", "django-storages (>=1.13.2,<1.14.0)", "mysqlclient (>=2.2.3,<2.3.0)", "napalm (>=4.1.0,<4.2.0)", "social-auth-core[openidconnect,saml] (>=4.5.3,<4.6.0)"] -ldap = ["django-auth-ldap (>=4.3.0,<4.4.0)"] +all = ["django-auth-ldap (>=4.7.0,<4.8.0)", "django-storages (>=1.14.2,<1.15.0)", "mysqlclient (>=2.2.3,<2.3.0)", "napalm (>=4.1.0,<4.2.0)", "social-auth-core[openidconnect,saml] (>=4.5.3,<4.6.0)"] +ldap = ["django-auth-ldap (>=4.7.0,<4.8.0)"] mysql = ["mysqlclient (>=2.2.3,<2.3.0)"] napalm = ["napalm (>=4.1.0,<4.2.0)"] -remote-storage = ["django-storages (>=1.13.2,<1.14.0)"] +remote-storage = ["django-storages (>=1.14.2,<1.15.0)"] sso = ["social-auth-core[openidconnect,saml] (>=4.5.3,<4.6.0)"] [[package]] @@ -2437,22 +2437,21 @@ nautobot = ["nautobot (>=2.0.0,<3.0.0)"] [[package]] name = "nautobot-ssot" -version = "2.5.0" +version = "2.6.0" description = "Nautobot Single Source of Truth" optional = false python-versions = "<3.12,>=3.8" files = [ - {file = "nautobot_ssot-2.5.0-py3-none-any.whl", hash = "sha256:41c4243d3075f61f2e1b3f23c842ee0ac7019f9985761600e7dbc62b4105cb09"}, - {file = 
"nautobot_ssot-2.5.0.tar.gz", hash = "sha256:57ff7a1d503641da69ec912137f3c93d772a4e95f092ab0a65da5ab7a89448c9"}, + {file = "nautobot_ssot-2.6.0-py3-none-any.whl", hash = "sha256:61c29b1d6e180c6561eeebd0239bcd742cd116816af62a98e0711a7068a60446"}, + {file = "nautobot_ssot-2.6.0.tar.gz", hash = "sha256:07beefb87a0d7231682249c670bcf6a7441f5732495810eeef544909f29700ca"}, ] [package.dependencies] diffsync = ">=1.6.0,<2.0.0" -drf-spectacular = "0.26.3" Markdown = "!=3.3.5" nautobot = ">=2.1.0,<3.0.0" packaging = ">=21.3,<24" -prometheus-client = ">=0.17.1,<0.18.0" +prometheus-client = ">=0.17.1" [package.extras] aci = ["PyYAML (>=6)"] @@ -2483,13 +2482,13 @@ six = "*" [[package]] name = "netaddr" -version = "0.8.0" +version = "0.10.1" description = "A network address manipulation library for Python" optional = false python-versions = "*" files = [ - {file = "netaddr-0.8.0-py2.py3-none-any.whl", hash = "sha256:9666d0232c32d2656e5e5f8d735f58fd6c7457ce52fc21c98d45f2af78f990ac"}, - {file = "netaddr-0.8.0.tar.gz", hash = "sha256:d6cc57c7a07b1d9d2e917aa8b36ae8ce61c35ba3fcd1b83ca31c5a0ee2b5a243"}, + {file = "netaddr-0.10.1-py2.py3-none-any.whl", hash = "sha256:9822305b42ea1020d54fee322d43cee5622b044c07a1f0130b459bb467efcf88"}, + {file = "netaddr-0.10.1.tar.gz", hash = "sha256:f4da4222ca8c3f43c8e18a8263e5426c750a3a837fdfeccf74c68d0408eaa3bf"}, ] [[package]] @@ -2790,79 +2789,80 @@ files = [ [[package]] name = "pillow" -version = "10.2.0" +version = "10.3.0" description = "Python Imaging Library (Fork)" optional = false python-versions = ">=3.8" files = [ - {file = "pillow-10.2.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:7823bdd049099efa16e4246bdf15e5a13dbb18a51b68fa06d6c1d4d8b99a796e"}, - {file = "pillow-10.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:83b2021f2ade7d1ed556bc50a399127d7fb245e725aa0113ebd05cfe88aaf588"}, - {file = "pillow-10.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:6fad5ff2f13d69b7e74ce5b4ecd12cc0ec530fcee76356cac6742785ff71c452"}, - {file = "pillow-10.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da2b52b37dad6d9ec64e653637a096905b258d2fc2b984c41ae7d08b938a67e4"}, - {file = "pillow-10.2.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:47c0995fc4e7f79b5cfcab1fc437ff2890b770440f7696a3ba065ee0fd496563"}, - {file = "pillow-10.2.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:322bdf3c9b556e9ffb18f93462e5f749d3444ce081290352c6070d014c93feb2"}, - {file = "pillow-10.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:51f1a1bffc50e2e9492e87d8e09a17c5eea8409cda8d3f277eb6edc82813c17c"}, - {file = "pillow-10.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:69ffdd6120a4737710a9eee73e1d2e37db89b620f702754b8f6e62594471dee0"}, - {file = "pillow-10.2.0-cp310-cp310-win32.whl", hash = "sha256:c6dafac9e0f2b3c78df97e79af707cdc5ef8e88208d686a4847bab8266870023"}, - {file = "pillow-10.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:aebb6044806f2e16ecc07b2a2637ee1ef67a11840a66752751714a0d924adf72"}, - {file = "pillow-10.2.0-cp310-cp310-win_arm64.whl", hash = "sha256:7049e301399273a0136ff39b84c3678e314f2158f50f517bc50285fb5ec847ad"}, - {file = "pillow-10.2.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:35bb52c37f256f662abdfa49d2dfa6ce5d93281d323a9af377a120e89a9eafb5"}, - {file = "pillow-10.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9c23f307202661071d94b5e384e1e1dc7dfb972a28a2310e4ee16103e66ddb67"}, - {file = "pillow-10.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:773efe0603db30c281521a7c0214cad7836c03b8ccff897beae9b47c0b657d61"}, - {file = "pillow-10.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11fa2e5984b949b0dd6d7a94d967743d87c577ff0b83392f17cb3990d0d2fd6e"}, - {file = "pillow-10.2.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = 
"sha256:716d30ed977be8b37d3ef185fecb9e5a1d62d110dfbdcd1e2a122ab46fddb03f"}, - {file = "pillow-10.2.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:a086c2af425c5f62a65e12fbf385f7c9fcb8f107d0849dba5839461a129cf311"}, - {file = "pillow-10.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c8de2789052ed501dd829e9cae8d3dcce7acb4777ea4a479c14521c942d395b1"}, - {file = "pillow-10.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:609448742444d9290fd687940ac0b57fb35e6fd92bdb65386e08e99af60bf757"}, - {file = "pillow-10.2.0-cp311-cp311-win32.whl", hash = "sha256:823ef7a27cf86df6597fa0671066c1b596f69eba53efa3d1e1cb8b30f3533068"}, - {file = "pillow-10.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:1da3b2703afd040cf65ec97efea81cfba59cdbed9c11d8efc5ab09df9509fc56"}, - {file = "pillow-10.2.0-cp311-cp311-win_arm64.whl", hash = "sha256:edca80cbfb2b68d7b56930b84a0e45ae1694aeba0541f798e908a49d66b837f1"}, - {file = "pillow-10.2.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:1b5e1b74d1bd1b78bc3477528919414874748dd363e6272efd5abf7654e68bef"}, - {file = "pillow-10.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0eae2073305f451d8ecacb5474997c08569fb4eb4ac231ffa4ad7d342fdc25ac"}, - {file = "pillow-10.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7c2286c23cd350b80d2fc9d424fc797575fb16f854b831d16fd47ceec078f2c"}, - {file = "pillow-10.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e23412b5c41e58cec602f1135c57dfcf15482013ce6e5f093a86db69646a5aa"}, - {file = "pillow-10.2.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:52a50aa3fb3acb9cf7213573ef55d31d6eca37f5709c69e6858fe3bc04a5c2a2"}, - {file = "pillow-10.2.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:127cee571038f252a552760076407f9cff79761c3d436a12af6000cd182a9d04"}, - {file = "pillow-10.2.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:8d12251f02d69d8310b046e82572ed486685c38f02176bd08baf216746eb947f"}, - 
{file = "pillow-10.2.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:54f1852cd531aa981bc0965b7d609f5f6cc8ce8c41b1139f6ed6b3c54ab82bfb"}, - {file = "pillow-10.2.0-cp312-cp312-win32.whl", hash = "sha256:257d8788df5ca62c980314053197f4d46eefedf4e6175bc9412f14412ec4ea2f"}, - {file = "pillow-10.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:154e939c5f0053a383de4fd3d3da48d9427a7e985f58af8e94d0b3c9fcfcf4f9"}, - {file = "pillow-10.2.0-cp312-cp312-win_arm64.whl", hash = "sha256:f379abd2f1e3dddb2b61bc67977a6b5a0a3f7485538bcc6f39ec76163891ee48"}, - {file = "pillow-10.2.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:8373c6c251f7ef8bda6675dd6d2b3a0fcc31edf1201266b5cf608b62a37407f9"}, - {file = "pillow-10.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:870ea1ada0899fd0b79643990809323b389d4d1d46c192f97342eeb6ee0b8483"}, - {file = "pillow-10.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4b6b1e20608493548b1f32bce8cca185bf0480983890403d3b8753e44077129"}, - {file = "pillow-10.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3031709084b6e7852d00479fd1d310b07d0ba82765f973b543c8af5061cf990e"}, - {file = "pillow-10.2.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:3ff074fc97dd4e80543a3e91f69d58889baf2002b6be64347ea8cf5533188213"}, - {file = "pillow-10.2.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:cb4c38abeef13c61d6916f264d4845fab99d7b711be96c326b84df9e3e0ff62d"}, - {file = "pillow-10.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b1b3020d90c2d8e1dae29cf3ce54f8094f7938460fb5ce8bc5c01450b01fbaf6"}, - {file = "pillow-10.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:170aeb00224ab3dc54230c797f8404507240dd868cf52066f66a41b33169bdbe"}, - {file = "pillow-10.2.0-cp38-cp38-win32.whl", hash = "sha256:c4225f5220f46b2fde568c74fca27ae9771536c2e29d7c04f4fb62c83275ac4e"}, - {file = "pillow-10.2.0-cp38-cp38-win_amd64.whl", hash = 
"sha256:0689b5a8c5288bc0504d9fcee48f61a6a586b9b98514d7d29b840143d6734f39"}, - {file = "pillow-10.2.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:b792a349405fbc0163190fde0dc7b3fef3c9268292586cf5645598b48e63dc67"}, - {file = "pillow-10.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c570f24be1e468e3f0ce7ef56a89a60f0e05b30a3669a459e419c6eac2c35364"}, - {file = "pillow-10.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8ecd059fdaf60c1963c58ceb8997b32e9dc1b911f5da5307aab614f1ce5c2fb"}, - {file = "pillow-10.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c365fd1703040de1ec284b176d6af5abe21b427cb3a5ff68e0759e1e313a5e7e"}, - {file = "pillow-10.2.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:70c61d4c475835a19b3a5aa42492409878bbca7438554a1f89d20d58a7c75c01"}, - {file = "pillow-10.2.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:b6f491cdf80ae540738859d9766783e3b3c8e5bd37f5dfa0b76abdecc5081f13"}, - {file = "pillow-10.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9d189550615b4948f45252d7f005e53c2040cea1af5b60d6f79491a6e147eef7"}, - {file = "pillow-10.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:49d9ba1ed0ef3e061088cd1e7538a0759aab559e2e0a80a36f9fd9d8c0c21591"}, - {file = "pillow-10.2.0-cp39-cp39-win32.whl", hash = "sha256:babf5acfede515f176833ed6028754cbcd0d206f7f614ea3447d67c33be12516"}, - {file = "pillow-10.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:0304004f8067386b477d20a518b50f3fa658a28d44e4116970abfcd94fac34a8"}, - {file = "pillow-10.2.0-cp39-cp39-win_arm64.whl", hash = "sha256:0fb3e7fc88a14eacd303e90481ad983fd5b69c761e9e6ef94c983f91025da869"}, - {file = "pillow-10.2.0-pp310-pypy310_pp73-macosx_10_10_x86_64.whl", hash = "sha256:322209c642aabdd6207517e9739c704dc9f9db943015535783239022002f054a"}, - {file = "pillow-10.2.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:3eedd52442c0a5ff4f887fab0c1c0bb164d8635b32c894bc1faf4c618dd89df2"}, - {file = "pillow-10.2.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb28c753fd5eb3dd859b4ee95de66cc62af91bcff5db5f2571d32a520baf1f04"}, - {file = "pillow-10.2.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:33870dc4653c5017bf4c8873e5488d8f8d5f8935e2f1fb9a2208c47cdd66efd2"}, - {file = "pillow-10.2.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:3c31822339516fb3c82d03f30e22b1d038da87ef27b6a78c9549888f8ceda39a"}, - {file = "pillow-10.2.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a2b56ba36e05f973d450582fb015594aaa78834fefe8dfb8fcd79b93e64ba4c6"}, - {file = "pillow-10.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:d8e6aeb9201e655354b3ad049cb77d19813ad4ece0df1249d3c793de3774f8c7"}, - {file = "pillow-10.2.0-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:2247178effb34a77c11c0e8ac355c7a741ceca0a732b27bf11e747bbc950722f"}, - {file = "pillow-10.2.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15587643b9e5eb26c48e49a7b33659790d28f190fc514a322d55da2fb5c2950e"}, - {file = "pillow-10.2.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753cd8f2086b2b80180d9b3010dd4ed147efc167c90d3bf593fe2af21265e5a5"}, - {file = "pillow-10.2.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:7c8f97e8e7a9009bcacbe3766a36175056c12f9a44e6e6f2d5caad06dcfbf03b"}, - {file = "pillow-10.2.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:d1b35bcd6c5543b9cb547dee3150c93008f8dd0f1fef78fc0cd2b141c5baf58a"}, - {file = "pillow-10.2.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:fe4c15f6c9285dc54ce6553a3ce908ed37c8f3825b5a51a15c91442bb955b868"}, - {file = "pillow-10.2.0.tar.gz", hash = "sha256:e87f0b2c78157e12d7686b27d63c070fd65d994e8ddae6f328e0dcf4a0cd007e"}, + {file = "pillow-10.3.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = 
"sha256:90b9e29824800e90c84e4022dd5cc16eb2d9605ee13f05d47641eb183cd73d45"}, + {file = "pillow-10.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a2c405445c79c3f5a124573a051062300936b0281fee57637e706453e452746c"}, + {file = "pillow-10.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78618cdbccaa74d3f88d0ad6cb8ac3007f1a6fa5c6f19af64b55ca170bfa1edf"}, + {file = "pillow-10.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:261ddb7ca91fcf71757979534fb4c128448b5b4c55cb6152d280312062f69599"}, + {file = "pillow-10.3.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:ce49c67f4ea0609933d01c0731b34b8695a7a748d6c8d186f95e7d085d2fe475"}, + {file = "pillow-10.3.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:b14f16f94cbc61215115b9b1236f9c18403c15dd3c52cf629072afa9d54c1cbf"}, + {file = "pillow-10.3.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d33891be6df59d93df4d846640f0e46f1a807339f09e79a8040bc887bdcd7ed3"}, + {file = "pillow-10.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b50811d664d392f02f7761621303eba9d1b056fb1868c8cdf4231279645c25f5"}, + {file = "pillow-10.3.0-cp310-cp310-win32.whl", hash = "sha256:ca2870d5d10d8726a27396d3ca4cf7976cec0f3cb706debe88e3a5bd4610f7d2"}, + {file = "pillow-10.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:f0d0591a0aeaefdaf9a5e545e7485f89910c977087e7de2b6c388aec32011e9f"}, + {file = "pillow-10.3.0-cp310-cp310-win_arm64.whl", hash = "sha256:ccce24b7ad89adb5a1e34a6ba96ac2530046763912806ad4c247356a8f33a67b"}, + {file = "pillow-10.3.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:5f77cf66e96ae734717d341c145c5949c63180842a545c47a0ce7ae52ca83795"}, + {file = "pillow-10.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e4b878386c4bf293578b48fc570b84ecfe477d3b77ba39a6e87150af77f40c57"}, + {file = "pillow-10.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:fdcbb4068117dfd9ce0138d068ac512843c52295ed996ae6dd1faf537b6dbc27"}, + {file = "pillow-10.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9797a6c8fe16f25749b371c02e2ade0efb51155e767a971c61734b1bf6293994"}, + {file = "pillow-10.3.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:9e91179a242bbc99be65e139e30690e081fe6cb91a8e77faf4c409653de39451"}, + {file = "pillow-10.3.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:1b87bd9d81d179bd8ab871603bd80d8645729939f90b71e62914e816a76fc6bd"}, + {file = "pillow-10.3.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:81d09caa7b27ef4e61cb7d8fbf1714f5aec1c6b6c5270ee53504981e6e9121ad"}, + {file = "pillow-10.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:048ad577748b9fa4a99a0548c64f2cb8d672d5bf2e643a739ac8faff1164238c"}, + {file = "pillow-10.3.0-cp311-cp311-win32.whl", hash = "sha256:7161ec49ef0800947dc5570f86568a7bb36fa97dd09e9827dc02b718c5643f09"}, + {file = "pillow-10.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:8eb0908e954d093b02a543dc963984d6e99ad2b5e36503d8a0aaf040505f747d"}, + {file = "pillow-10.3.0-cp311-cp311-win_arm64.whl", hash = "sha256:4e6f7d1c414191c1199f8996d3f2282b9ebea0945693fb67392c75a3a320941f"}, + {file = "pillow-10.3.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:e46f38133e5a060d46bd630faa4d9fa0202377495df1f068a8299fd78c84de84"}, + {file = "pillow-10.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:50b8eae8f7334ec826d6eeffaeeb00e36b5e24aa0b9df322c247539714c6df19"}, + {file = "pillow-10.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9d3bea1c75f8c53ee4d505c3e67d8c158ad4df0d83170605b50b64025917f338"}, + {file = "pillow-10.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:19aeb96d43902f0a783946a0a87dbdad5c84c936025b8419da0a0cd7724356b1"}, + {file = "pillow-10.3.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = 
"sha256:74d28c17412d9caa1066f7a31df8403ec23d5268ba46cd0ad2c50fb82ae40462"}, + {file = "pillow-10.3.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:ff61bfd9253c3915e6d41c651d5f962da23eda633cf02262990094a18a55371a"}, + {file = "pillow-10.3.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d886f5d353333b4771d21267c7ecc75b710f1a73d72d03ca06df49b09015a9ef"}, + {file = "pillow-10.3.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4b5ec25d8b17217d635f8935dbc1b9aa5907962fae29dff220f2659487891cd3"}, + {file = "pillow-10.3.0-cp312-cp312-win32.whl", hash = "sha256:51243f1ed5161b9945011a7360e997729776f6e5d7005ba0c6879267d4c5139d"}, + {file = "pillow-10.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:412444afb8c4c7a6cc11a47dade32982439925537e483be7c0ae0cf96c4f6a0b"}, + {file = "pillow-10.3.0-cp312-cp312-win_arm64.whl", hash = "sha256:798232c92e7665fe82ac085f9d8e8ca98826f8e27859d9a96b41d519ecd2e49a"}, + {file = "pillow-10.3.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:4eaa22f0d22b1a7e93ff0a596d57fdede2e550aecffb5a1ef1106aaece48e96b"}, + {file = "pillow-10.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cd5e14fbf22a87321b24c88669aad3a51ec052eb145315b3da3b7e3cc105b9a2"}, + {file = "pillow-10.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1530e8f3a4b965eb6a7785cf17a426c779333eb62c9a7d1bbcf3ffd5bf77a4aa"}, + {file = "pillow-10.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d512aafa1d32efa014fa041d38868fda85028e3f930a96f85d49c7d8ddc0383"}, + {file = "pillow-10.3.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:339894035d0ede518b16073bdc2feef4c991ee991a29774b33e515f1d308e08d"}, + {file = "pillow-10.3.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:aa7e402ce11f0885305bfb6afb3434b3cd8f53b563ac065452d9d5654c7b86fd"}, + {file = "pillow-10.3.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0ea2a783a2bdf2a561808fe4a7a12e9aa3799b701ba305de596bc48b8bdfce9d"}, + {file = 
"pillow-10.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c78e1b00a87ce43bb37642c0812315b411e856a905d58d597750eb79802aaaa3"}, + {file = "pillow-10.3.0-cp38-cp38-win32.whl", hash = "sha256:72d622d262e463dfb7595202d229f5f3ab4b852289a1cd09650362db23b9eb0b"}, + {file = "pillow-10.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:2034f6759a722da3a3dbd91a81148cf884e91d1b747992ca288ab88c1de15999"}, + {file = "pillow-10.3.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:2ed854e716a89b1afcedea551cd85f2eb2a807613752ab997b9974aaa0d56936"}, + {file = "pillow-10.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:dc1a390a82755a8c26c9964d457d4c9cbec5405896cba94cf51f36ea0d855002"}, + {file = "pillow-10.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4203efca580f0dd6f882ca211f923168548f7ba334c189e9eab1178ab840bf60"}, + {file = "pillow-10.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3102045a10945173d38336f6e71a8dc71bcaeed55c3123ad4af82c52807b9375"}, + {file = "pillow-10.3.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:6fb1b30043271ec92dc65f6d9f0b7a830c210b8a96423074b15c7bc999975f57"}, + {file = "pillow-10.3.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:1dfc94946bc60ea375cc39cff0b8da6c7e5f8fcdc1d946beb8da5c216156ddd8"}, + {file = "pillow-10.3.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b09b86b27a064c9624d0a6c54da01c1beaf5b6cadfa609cf63789b1d08a797b9"}, + {file = "pillow-10.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d3b2348a78bc939b4fed6552abfd2e7988e0f81443ef3911a4b8498ca084f6eb"}, + {file = "pillow-10.3.0-cp39-cp39-win32.whl", hash = "sha256:45ebc7b45406febf07fef35d856f0293a92e7417ae7933207e90bf9090b70572"}, + {file = "pillow-10.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:0ba26351b137ca4e0db0342d5d00d2e355eb29372c05afd544ebf47c0956ffeb"}, + {file = "pillow-10.3.0-cp39-cp39-win_arm64.whl", hash = 
"sha256:50fd3f6b26e3441ae07b7c979309638b72abc1a25da31a81a7fbd9495713ef4f"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-macosx_10_10_x86_64.whl", hash = "sha256:6b02471b72526ab8a18c39cb7967b72d194ec53c1fd0a70b050565a0f366d355"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:8ab74c06ffdab957d7670c2a5a6e1a70181cd10b727cd788c4dd9005b6a8acd9"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:048eeade4c33fdf7e08da40ef402e748df113fd0b4584e32c4af74fe78baaeb2"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e2ec1e921fd07c7cda7962bad283acc2f2a9ccc1b971ee4b216b75fad6f0463"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:4c8e73e99da7db1b4cad7f8d682cf6abad7844da39834c288fbfa394a47bbced"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:16563993329b79513f59142a6b02055e10514c1a8e86dca8b48a893e33cf91e3"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:dd78700f5788ae180b5ee8902c6aea5a5726bac7c364b202b4b3e3ba2d293170"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:aff76a55a8aa8364d25400a210a65ff59d0168e0b4285ba6bf2bd83cf675ba32"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:b7bc2176354defba3edc2b9a777744462da2f8e921fbaf61e52acb95bafa9828"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:793b4e24db2e8742ca6423d3fde8396db336698c55cd34b660663ee9e45ed37f"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d93480005693d247f8346bc8ee28c72a2191bdf1f6b5db469c096c0c867ac015"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c83341b89884e2b2e55886e8fbbf37c3fa5efd6c8907124aeb72f285ae5696e5"}, + {file 
= "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1a1d1915db1a4fdb2754b9de292642a39a7fb28f1736699527bb649484fb966a"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a0eaa93d054751ee9964afa21c06247779b90440ca41d184aeb5d410f20ff591"}, + {file = "pillow-10.3.0.tar.gz", hash = "sha256:9d2455fbf44c914840c793e89aa82d0e1763a14253a000743719ae5946814b2d"}, ] [package.extras] @@ -2901,13 +2901,13 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest- [[package]] name = "prometheus-client" -version = "0.17.1" +version = "0.20.0" description = "Python client for the Prometheus monitoring system." optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "prometheus_client-0.17.1-py3-none-any.whl", hash = "sha256:e537f37160f6807b8202a6fc4764cdd19bac5480ddd3e0d463c3002b34462101"}, - {file = "prometheus_client-0.17.1.tar.gz", hash = "sha256:21e674f39831ae3f8acde238afd9a27a37d0d2fb5a28ea094f0ce25d2cbf2091"}, + {file = "prometheus_client-0.20.0-py3-none-any.whl", hash = "sha256:cde524a85bce83ca359cc837f28b8c0db5cac7aa653a588fd7e84ba061c329e7"}, + {file = "prometheus_client-0.20.0.tar.gz", hash = "sha256:287629d00b147a32dcb2be0b9df905da599b2d82f80377083ec8463309a4bb89"}, ] [package.extras] @@ -3587,104 +3587,104 @@ rpds-py = ">=0.7.0" [[package]] name = "regex" -version = "2023.12.25" +version = "2024.4.16" description = "Alternative regular expression module, to replace re." 
optional = false python-versions = ">=3.7" files = [ - {file = "regex-2023.12.25-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0694219a1d54336fd0445ea382d49d36882415c0134ee1e8332afd1529f0baa5"}, - {file = "regex-2023.12.25-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b014333bd0217ad3d54c143de9d4b9a3ca1c5a29a6d0d554952ea071cff0f1f8"}, - {file = "regex-2023.12.25-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d865984b3f71f6d0af64d0d88f5733521698f6c16f445bb09ce746c92c97c586"}, - {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e0eabac536b4cc7f57a5f3d095bfa557860ab912f25965e08fe1545e2ed8b4c"}, - {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c25a8ad70e716f96e13a637802813f65d8a6760ef48672aa3502f4c24ea8b400"}, - {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9b6d73353f777630626f403b0652055ebfe8ff142a44ec2cf18ae470395766e"}, - {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9cc99d6946d750eb75827cb53c4371b8b0fe89c733a94b1573c9dd16ea6c9e4"}, - {file = "regex-2023.12.25-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88d1f7bef20c721359d8675f7d9f8e414ec5003d8f642fdfd8087777ff7f94b5"}, - {file = "regex-2023.12.25-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cb3fe77aec8f1995611f966d0c656fdce398317f850d0e6e7aebdfe61f40e1cd"}, - {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7aa47c2e9ea33a4a2a05f40fcd3ea36d73853a2aae7b4feab6fc85f8bf2c9704"}, - {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:df26481f0c7a3f8739fecb3e81bc9da3fcfae34d6c094563b9d4670b047312e1"}, - {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = 
"sha256:c40281f7d70baf6e0db0c2f7472b31609f5bc2748fe7275ea65a0b4601d9b392"}, - {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:d94a1db462d5690ebf6ae86d11c5e420042b9898af5dcf278bd97d6bda065423"}, - {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ba1b30765a55acf15dce3f364e4928b80858fa8f979ad41f862358939bdd1f2f"}, - {file = "regex-2023.12.25-cp310-cp310-win32.whl", hash = "sha256:150c39f5b964e4d7dba46a7962a088fbc91f06e606f023ce57bb347a3b2d4630"}, - {file = "regex-2023.12.25-cp310-cp310-win_amd64.whl", hash = "sha256:09da66917262d9481c719599116c7dc0c321ffcec4b1f510c4f8a066f8768105"}, - {file = "regex-2023.12.25-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:1b9d811f72210fa9306aeb88385b8f8bcef0dfbf3873410413c00aa94c56c2b6"}, - {file = "regex-2023.12.25-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d902a43085a308cef32c0d3aea962524b725403fd9373dea18110904003bac97"}, - {file = "regex-2023.12.25-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d166eafc19f4718df38887b2bbe1467a4f74a9830e8605089ea7a30dd4da8887"}, - {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7ad32824b7f02bb3c9f80306d405a1d9b7bb89362d68b3c5a9be53836caebdb"}, - {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:636ba0a77de609d6510235b7f0e77ec494d2657108f777e8765efc060094c98c"}, - {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fda75704357805eb953a3ee15a2b240694a9a514548cd49b3c5124b4e2ad01b"}, - {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f72cbae7f6b01591f90814250e636065850c5926751af02bb48da94dfced7baa"}, - {file = "regex-2023.12.25-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db2a0b1857f18b11e3b0e54ddfefc96af46b0896fb678c85f63fb8c37518b3e7"}, - {file = 
"regex-2023.12.25-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7502534e55c7c36c0978c91ba6f61703faf7ce733715ca48f499d3dbbd7657e0"}, - {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e8c7e08bb566de4faaf11984af13f6bcf6a08f327b13631d41d62592681d24fe"}, - {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:283fc8eed679758de38fe493b7d7d84a198b558942b03f017b1f94dda8efae80"}, - {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:f44dd4d68697559d007462b0a3a1d9acd61d97072b71f6d1968daef26bc744bd"}, - {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:67d3ccfc590e5e7197750fcb3a2915b416a53e2de847a728cfa60141054123d4"}, - {file = "regex-2023.12.25-cp311-cp311-win32.whl", hash = "sha256:68191f80a9bad283432385961d9efe09d783bcd36ed35a60fb1ff3f1ec2efe87"}, - {file = "regex-2023.12.25-cp311-cp311-win_amd64.whl", hash = "sha256:7d2af3f6b8419661a0c421584cfe8aaec1c0e435ce7e47ee2a97e344b98f794f"}, - {file = "regex-2023.12.25-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8a0ccf52bb37d1a700375a6b395bff5dd15c50acb745f7db30415bae3c2b0715"}, - {file = "regex-2023.12.25-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c3c4a78615b7762740531c27cf46e2f388d8d727d0c0c739e72048beb26c8a9d"}, - {file = "regex-2023.12.25-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ad83e7545b4ab69216cef4cc47e344d19622e28aabec61574b20257c65466d6a"}, - {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7a635871143661feccce3979e1727c4e094f2bdfd3ec4b90dfd4f16f571a87a"}, - {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d498eea3f581fbe1b34b59c697512a8baef88212f92e4c7830fcc1499f5b45a5"}, - {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:43f7cd5754d02a56ae4ebb91b33461dc67be8e3e0153f593c509e21d219c5060"}, - {file = 
"regex-2023.12.25-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51f4b32f793812714fd5307222a7f77e739b9bc566dc94a18126aba3b92b98a3"}, - {file = "regex-2023.12.25-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba99d8077424501b9616b43a2d208095746fb1284fc5ba490139651f971d39d9"}, - {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4bfc2b16e3ba8850e0e262467275dd4d62f0d045e0e9eda2bc65078c0110a11f"}, - {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8c2c19dae8a3eb0ea45a8448356ed561be843b13cbc34b840922ddf565498c1c"}, - {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:60080bb3d8617d96f0fb7e19796384cc2467447ef1c491694850ebd3670bc457"}, - {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b77e27b79448e34c2c51c09836033056a0547aa360c45eeeb67803da7b0eedaf"}, - {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:518440c991f514331f4850a63560321f833979d145d7d81186dbe2f19e27ae3d"}, - {file = "regex-2023.12.25-cp312-cp312-win32.whl", hash = "sha256:e2610e9406d3b0073636a3a2e80db05a02f0c3169b5632022b4e81c0364bcda5"}, - {file = "regex-2023.12.25-cp312-cp312-win_amd64.whl", hash = "sha256:cc37b9aeebab425f11f27e5e9e6cf580be7206c6582a64467a14dda211abc232"}, - {file = "regex-2023.12.25-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:da695d75ac97cb1cd725adac136d25ca687da4536154cdc2815f576e4da11c69"}, - {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d126361607b33c4eb7b36debc173bf25d7805847346dd4d99b5499e1fef52bc7"}, - {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4719bb05094d7d8563a450cf8738d2e1061420f79cfcc1fa7f0a44744c4d8f73"}, - {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:5dd58946bce44b53b06d94aa95560d0b243eb2fe64227cba50017a8d8b3cd3e2"}, - {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22a86d9fff2009302c440b9d799ef2fe322416d2d58fc124b926aa89365ec482"}, - {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2aae8101919e8aa05ecfe6322b278f41ce2994c4a430303c4cd163fef746e04f"}, - {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e692296c4cc2873967771345a876bcfc1c547e8dd695c6b89342488b0ea55cd8"}, - {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:263ef5cc10979837f243950637fffb06e8daed7f1ac1e39d5910fd29929e489a"}, - {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:d6f7e255e5fa94642a0724e35406e6cb7001c09d476ab5fce002f652b36d0c39"}, - {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:88ad44e220e22b63b0f8f81f007e8abbb92874d8ced66f32571ef8beb0643b2b"}, - {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:3a17d3ede18f9cedcbe23d2daa8a2cd6f59fe2bf082c567e43083bba3fb00347"}, - {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d15b274f9e15b1a0b7a45d2ac86d1f634d983ca40d6b886721626c47a400bf39"}, - {file = "regex-2023.12.25-cp37-cp37m-win32.whl", hash = "sha256:ed19b3a05ae0c97dd8f75a5d8f21f7723a8c33bbc555da6bbe1f96c470139d3c"}, - {file = "regex-2023.12.25-cp37-cp37m-win_amd64.whl", hash = "sha256:a6d1047952c0b8104a1d371f88f4ab62e6275567d4458c1e26e9627ad489b445"}, - {file = "regex-2023.12.25-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b43523d7bc2abd757119dbfb38af91b5735eea45537ec6ec3a5ec3f9562a1c53"}, - {file = "regex-2023.12.25-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:efb2d82f33b2212898f1659fb1c2e9ac30493ac41e4d53123da374c3b5541e64"}, - {file = 
"regex-2023.12.25-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b7fca9205b59c1a3d5031f7e64ed627a1074730a51c2a80e97653e3e9fa0d415"}, - {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086dd15e9435b393ae06f96ab69ab2d333f5d65cbe65ca5a3ef0ec9564dfe770"}, - {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e81469f7d01efed9b53740aedd26085f20d49da65f9c1f41e822a33992cb1590"}, - {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:34e4af5b27232f68042aa40a91c3b9bb4da0eeb31b7632e0091afc4310afe6cb"}, - {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9852b76ab558e45b20bf1893b59af64a28bd3820b0c2efc80e0a70a4a3ea51c1"}, - {file = "regex-2023.12.25-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff100b203092af77d1a5a7abe085b3506b7eaaf9abf65b73b7d6905b6cb76988"}, - {file = "regex-2023.12.25-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cc038b2d8b1470364b1888a98fd22d616fba2b6309c5b5f181ad4483e0017861"}, - {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:094ba386bb5c01e54e14434d4caabf6583334090865b23ef58e0424a6286d3dc"}, - {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5cd05d0f57846d8ba4b71d9c00f6f37d6b97d5e5ef8b3c3840426a475c8f70f4"}, - {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:9aa1a67bbf0f957bbe096375887b2505f5d8ae16bf04488e8b0f334c36e31360"}, - {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:98a2636994f943b871786c9e82bfe7883ecdaba2ef5df54e1450fa9869d1f756"}, - {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:37f8e93a81fc5e5bd8db7e10e62dc64261bcd88f8d7e6640aaebe9bc180d9ce2"}, - {file = 
"regex-2023.12.25-cp38-cp38-win32.whl", hash = "sha256:d78bd484930c1da2b9679290a41cdb25cc127d783768a0369d6b449e72f88beb"}, - {file = "regex-2023.12.25-cp38-cp38-win_amd64.whl", hash = "sha256:b521dcecebc5b978b447f0f69b5b7f3840eac454862270406a39837ffae4e697"}, - {file = "regex-2023.12.25-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f7bc09bc9c29ebead055bcba136a67378f03d66bf359e87d0f7c759d6d4ffa31"}, - {file = "regex-2023.12.25-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e14b73607d6231f3cc4622809c196b540a6a44e903bcfad940779c80dffa7be7"}, - {file = "regex-2023.12.25-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9eda5f7a50141291beda3edd00abc2d4a5b16c29c92daf8d5bd76934150f3edc"}, - {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc6bb9aa69aacf0f6032c307da718f61a40cf970849e471254e0e91c56ffca95"}, - {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:298dc6354d414bc921581be85695d18912bea163a8b23cac9a2562bbcd5088b1"}, - {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2f4e475a80ecbd15896a976aa0b386c5525d0ed34d5c600b6d3ebac0a67c7ddf"}, - {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:531ac6cf22b53e0696f8e1d56ce2396311254eb806111ddd3922c9d937151dae"}, - {file = "regex-2023.12.25-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22f3470f7524b6da61e2020672df2f3063676aff444db1daa283c2ea4ed259d6"}, - {file = "regex-2023.12.25-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:89723d2112697feaa320c9d351e5f5e7b841e83f8b143dba8e2d2b5f04e10923"}, - {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0ecf44ddf9171cd7566ef1768047f6e66975788258b1c6c6ca78098b95cf9a3d"}, - {file = 
"regex-2023.12.25-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:905466ad1702ed4acfd67a902af50b8db1feeb9781436372261808df7a2a7bca"}, - {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:4558410b7a5607a645e9804a3e9dd509af12fb72b9825b13791a37cd417d73a5"}, - {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:7e316026cc1095f2a3e8cc012822c99f413b702eaa2ca5408a513609488cb62f"}, - {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3b1de218d5375cd6ac4b5493e0b9f3df2be331e86520f23382f216c137913d20"}, - {file = "regex-2023.12.25-cp39-cp39-win32.whl", hash = "sha256:11a963f8e25ab5c61348d090bf1b07f1953929c13bd2309a0662e9ff680763c9"}, - {file = "regex-2023.12.25-cp39-cp39-win_amd64.whl", hash = "sha256:e693e233ac92ba83a87024e1d32b5f9ab15ca55ddd916d878146f4e3406b5c91"}, - {file = "regex-2023.12.25.tar.gz", hash = "sha256:29171aa128da69afdf4bde412d5bedc335f2ca8fcfe4489038577d05f16181e5"}, + {file = "regex-2024.4.16-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:fb83cc090eac63c006871fd24db5e30a1f282faa46328572661c0a24a2323a08"}, + {file = "regex-2024.4.16-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8c91e1763696c0eb66340c4df98623c2d4e77d0746b8f8f2bee2c6883fd1fe18"}, + {file = "regex-2024.4.16-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:10188fe732dec829c7acca7422cdd1bf57d853c7199d5a9e96bb4d40db239c73"}, + {file = "regex-2024.4.16-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:956b58d692f235cfbf5b4f3abd6d99bf102f161ccfe20d2fd0904f51c72c4c66"}, + {file = "regex-2024.4.16-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a70b51f55fd954d1f194271695821dd62054d949efd6368d8be64edd37f55c86"}, + {file = "regex-2024.4.16-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c02fcd2bf45162280613d2e4a1ca3ac558ff921ae4e308ecb307650d3a6ee51"}, + {file = 
"regex-2024.4.16-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4ed75ea6892a56896d78f11006161eea52c45a14994794bcfa1654430984b22"}, + {file = "regex-2024.4.16-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bd727ad276bb91928879f3aa6396c9a1d34e5e180dce40578421a691eeb77f47"}, + {file = "regex-2024.4.16-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:7cbc5d9e8a1781e7be17da67b92580d6ce4dcef5819c1b1b89f49d9678cc278c"}, + {file = "regex-2024.4.16-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:78fddb22b9ef810b63ef341c9fcf6455232d97cfe03938cbc29e2672c436670e"}, + {file = "regex-2024.4.16-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:445ca8d3c5a01309633a0c9db57150312a181146315693273e35d936472df912"}, + {file = "regex-2024.4.16-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:95399831a206211d6bc40224af1c635cb8790ddd5c7493e0bd03b85711076a53"}, + {file = "regex-2024.4.16-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:7731728b6568fc286d86745f27f07266de49603a6fdc4d19c87e8c247be452af"}, + {file = "regex-2024.4.16-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4facc913e10bdba42ec0aee76d029aedda628161a7ce4116b16680a0413f658a"}, + {file = "regex-2024.4.16-cp310-cp310-win32.whl", hash = "sha256:911742856ce98d879acbea33fcc03c1d8dc1106234c5e7d068932c945db209c0"}, + {file = "regex-2024.4.16-cp310-cp310-win_amd64.whl", hash = "sha256:e0a2df336d1135a0b3a67f3bbf78a75f69562c1199ed9935372b82215cddd6e2"}, + {file = "regex-2024.4.16-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:1210365faba7c2150451eb78ec5687871c796b0f1fa701bfd2a4a25420482d26"}, + {file = "regex-2024.4.16-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9ab40412f8cd6f615bfedea40c8bf0407d41bf83b96f6fc9ff34976d6b7037fd"}, + {file = "regex-2024.4.16-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:fd80d1280d473500d8086d104962a82d77bfbf2b118053824b7be28cd5a79ea5"}, + {file = "regex-2024.4.16-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bb966fdd9217e53abf824f437a5a2d643a38d4fd5fd0ca711b9da683d452969"}, + {file = "regex-2024.4.16-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:20b7a68444f536365af42a75ccecb7ab41a896a04acf58432db9e206f4e525d6"}, + {file = "regex-2024.4.16-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b74586dd0b039c62416034f811d7ee62810174bb70dffcca6439f5236249eb09"}, + {file = "regex-2024.4.16-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c8290b44d8b0af4e77048646c10c6e3aa583c1ca67f3b5ffb6e06cf0c6f0f89"}, + {file = "regex-2024.4.16-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2d80a6749724b37853ece57988b39c4e79d2b5fe2869a86e8aeae3bbeef9eb0"}, + {file = "regex-2024.4.16-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:3a1018e97aeb24e4f939afcd88211ace472ba566efc5bdf53fd8fd7f41fa7170"}, + {file = "regex-2024.4.16-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:8d015604ee6204e76569d2f44e5a210728fa917115bef0d102f4107e622b08d5"}, + {file = "regex-2024.4.16-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:3d5ac5234fb5053850d79dd8eb1015cb0d7d9ed951fa37aa9e6249a19aa4f336"}, + {file = "regex-2024.4.16-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:0a38d151e2cdd66d16dab550c22f9521ba79761423b87c01dae0a6e9add79c0d"}, + {file = "regex-2024.4.16-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:159dc4e59a159cb8e4e8f8961eb1fa5d58f93cb1acd1701d8aff38d45e1a84a6"}, + {file = "regex-2024.4.16-cp311-cp311-win32.whl", hash = "sha256:ba2336d6548dee3117520545cfe44dc28a250aa091f8281d28804aa8d707d93d"}, + {file = "regex-2024.4.16-cp311-cp311-win_amd64.whl", hash = "sha256:8f83b6fd3dc3ba94d2b22717f9c8b8512354fd95221ac661784df2769ea9bba9"}, + {file = 
"regex-2024.4.16-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:80b696e8972b81edf0af2a259e1b2a4a661f818fae22e5fa4fa1a995fb4a40fd"}, + {file = "regex-2024.4.16-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d61ae114d2a2311f61d90c2ef1358518e8f05eafda76eaf9c772a077e0b465ec"}, + {file = "regex-2024.4.16-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8ba6745440b9a27336443b0c285d705ce73adb9ec90e2f2004c64d95ab5a7598"}, + {file = "regex-2024.4.16-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6295004b2dd37b0835ea5c14a33e00e8cfa3c4add4d587b77287825f3418d310"}, + {file = "regex-2024.4.16-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4aba818dcc7263852aabb172ec27b71d2abca02a593b95fa79351b2774eb1d2b"}, + {file = "regex-2024.4.16-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d0800631e565c47520aaa04ae38b96abc5196fe8b4aa9bd864445bd2b5848a7a"}, + {file = "regex-2024.4.16-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:08dea89f859c3df48a440dbdcd7b7155bc675f2fa2ec8c521d02dc69e877db70"}, + {file = "regex-2024.4.16-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eeaa0b5328b785abc344acc6241cffde50dc394a0644a968add75fcefe15b9d4"}, + {file = "regex-2024.4.16-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4e819a806420bc010489f4e741b3036071aba209f2e0989d4750b08b12a9343f"}, + {file = "regex-2024.4.16-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:c2d0e7cbb6341e830adcbfa2479fdeebbfbb328f11edd6b5675674e7a1e37730"}, + {file = "regex-2024.4.16-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:91797b98f5e34b6a49f54be33f72e2fb658018ae532be2f79f7c63b4ae225145"}, + {file = "regex-2024.4.16-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:d2da13568eff02b30fd54fccd1e042a70fe920d816616fda4bf54ec705668d81"}, + {file = "regex-2024.4.16-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:370c68dc5570b394cbaadff50e64d705f64debed30573e5c313c360689b6aadc"}, + {file = "regex-2024.4.16-cp312-cp312-win32.whl", hash = "sha256:904c883cf10a975b02ab3478bce652f0f5346a2c28d0a8521d97bb23c323cc8b"}, + {file = "regex-2024.4.16-cp312-cp312-win_amd64.whl", hash = "sha256:785c071c982dce54d44ea0b79cd6dfafddeccdd98cfa5f7b86ef69b381b457d9"}, + {file = "regex-2024.4.16-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e2f142b45c6fed48166faeb4303b4b58c9fcd827da63f4cf0a123c3480ae11fb"}, + {file = "regex-2024.4.16-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e87ab229332ceb127a165612d839ab87795972102cb9830e5f12b8c9a5c1b508"}, + {file = "regex-2024.4.16-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:81500ed5af2090b4a9157a59dbc89873a25c33db1bb9a8cf123837dcc9765047"}, + {file = "regex-2024.4.16-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b340cccad138ecb363324aa26893963dcabb02bb25e440ebdf42e30963f1a4e0"}, + {file = "regex-2024.4.16-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c72608e70f053643437bd2be0608f7f1c46d4022e4104d76826f0839199347a"}, + {file = "regex-2024.4.16-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a01fe2305e6232ef3e8f40bfc0f0f3a04def9aab514910fa4203bafbc0bb4682"}, + {file = "regex-2024.4.16-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:03576e3a423d19dda13e55598f0fd507b5d660d42c51b02df4e0d97824fdcae3"}, + {file = "regex-2024.4.16-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:549c3584993772e25f02d0656ac48abdda73169fe347263948cf2b1cead622f3"}, + {file = "regex-2024.4.16-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:34422d5a69a60b7e9a07a690094e824b66f5ddc662a5fc600d65b7c174a05f04"}, + {file = "regex-2024.4.16-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = 
"sha256:5f580c651a72b75c39e311343fe6875d6f58cf51c471a97f15a938d9fe4e0d37"}, + {file = "regex-2024.4.16-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:3399dd8a7495bbb2bacd59b84840eef9057826c664472e86c91d675d007137f5"}, + {file = "regex-2024.4.16-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8d1f86f3f4e2388aa3310b50694ac44daefbd1681def26b4519bd050a398dc5a"}, + {file = "regex-2024.4.16-cp37-cp37m-win32.whl", hash = "sha256:dd5acc0a7d38fdc7a3a6fd3ad14c880819008ecb3379626e56b163165162cc46"}, + {file = "regex-2024.4.16-cp37-cp37m-win_amd64.whl", hash = "sha256:ba8122e3bb94ecda29a8de4cf889f600171424ea586847aa92c334772d200331"}, + {file = "regex-2024.4.16-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:743deffdf3b3481da32e8a96887e2aa945ec6685af1cfe2bcc292638c9ba2f48"}, + {file = "regex-2024.4.16-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7571f19f4a3fd00af9341c7801d1ad1967fc9c3f5e62402683047e7166b9f2b4"}, + {file = "regex-2024.4.16-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:df79012ebf6f4efb8d307b1328226aef24ca446b3ff8d0e30202d7ebcb977a8c"}, + {file = "regex-2024.4.16-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e757d475953269fbf4b441207bb7dbdd1c43180711b6208e129b637792ac0b93"}, + {file = "regex-2024.4.16-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4313ab9bf6a81206c8ac28fdfcddc0435299dc88cad12cc6305fd0e78b81f9e4"}, + {file = "regex-2024.4.16-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d83c2bc678453646f1a18f8db1e927a2d3f4935031b9ad8a76e56760461105dd"}, + {file = "regex-2024.4.16-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9df1bfef97db938469ef0a7354b2d591a2d438bc497b2c489471bec0e6baf7c4"}, + {file = "regex-2024.4.16-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62120ed0de69b3649cc68e2965376048793f466c5a6c4370fb27c16c1beac22d"}, + {file = 
"regex-2024.4.16-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c2ef6f7990b6e8758fe48ad08f7e2f66c8f11dc66e24093304b87cae9037bb4a"}, + {file = "regex-2024.4.16-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8fc6976a3395fe4d1fbeb984adaa8ec652a1e12f36b56ec8c236e5117b585427"}, + {file = "regex-2024.4.16-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:03e68f44340528111067cecf12721c3df4811c67268b897fbe695c95f860ac42"}, + {file = "regex-2024.4.16-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:ec7e0043b91115f427998febaa2beb82c82df708168b35ece3accb610b91fac1"}, + {file = "regex-2024.4.16-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:c21fc21a4c7480479d12fd8e679b699f744f76bb05f53a1d14182b31f55aac76"}, + {file = "regex-2024.4.16-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:12f6a3f2f58bb7344751919a1876ee1b976fe08b9ffccb4bbea66f26af6017b9"}, + {file = "regex-2024.4.16-cp38-cp38-win32.whl", hash = "sha256:479595a4fbe9ed8f8f72c59717e8cf222da2e4c07b6ae5b65411e6302af9708e"}, + {file = "regex-2024.4.16-cp38-cp38-win_amd64.whl", hash = "sha256:0534b034fba6101611968fae8e856c1698da97ce2efb5c2b895fc8b9e23a5834"}, + {file = "regex-2024.4.16-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a7ccdd1c4a3472a7533b0a7aa9ee34c9a2bef859ba86deec07aff2ad7e0c3b94"}, + {file = "regex-2024.4.16-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6f2f017c5be19984fbbf55f8af6caba25e62c71293213f044da3ada7091a4455"}, + {file = "regex-2024.4.16-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:803b8905b52de78b173d3c1e83df0efb929621e7b7c5766c0843704d5332682f"}, + {file = "regex-2024.4.16-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:684008ec44ad275832a5a152f6e764bbe1914bea10968017b6feaecdad5736e0"}, + {file = "regex-2024.4.16-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65436dce9fdc0aeeb0a0effe0839cb3d6a05f45aa45a4d9f9c60989beca78b9c"}, + {file = 
"regex-2024.4.16-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ea355eb43b11764cf799dda62c658c4d2fdb16af41f59bb1ccfec517b60bcb07"}, + {file = "regex-2024.4.16-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98c1165f3809ce7774f05cb74e5408cd3aa93ee8573ae959a97a53db3ca3180d"}, + {file = "regex-2024.4.16-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cccc79a9be9b64c881f18305a7c715ba199e471a3973faeb7ba84172abb3f317"}, + {file = "regex-2024.4.16-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:00169caa125f35d1bca6045d65a662af0202704489fada95346cfa092ec23f39"}, + {file = "regex-2024.4.16-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:6cc38067209354e16c5609b66285af17a2863a47585bcf75285cab33d4c3b8df"}, + {file = "regex-2024.4.16-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:23cff1b267038501b179ccbbd74a821ac4a7192a1852d1d558e562b507d46013"}, + {file = "regex-2024.4.16-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:b9d320b3bf82a39f248769fc7f188e00f93526cc0fe739cfa197868633d44701"}, + {file = "regex-2024.4.16-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:89ec7f2c08937421bbbb8b48c54096fa4f88347946d4747021ad85f1b3021b3c"}, + {file = "regex-2024.4.16-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4918fd5f8b43aa7ec031e0fef1ee02deb80b6afd49c85f0790be1dc4ce34cb50"}, + {file = "regex-2024.4.16-cp39-cp39-win32.whl", hash = "sha256:684e52023aec43bdf0250e843e1fdd6febbe831bd9d52da72333fa201aaa2335"}, + {file = "regex-2024.4.16-cp39-cp39-win_amd64.whl", hash = "sha256:e697e1c0238133589e00c244a8b676bc2cfc3ab4961318d902040d099fec7483"}, + {file = "regex-2024.4.16.tar.gz", hash = "sha256:fa454d26f2e87ad661c4f0c5a5fe4cf6aab1e307d1b94f16ffdfcb089ba685c0"}, ] [[package]] @@ -3932,28 +3932,28 @@ files = [ [[package]] name = "ruff" -version = "0.3.5" +version = "0.3.7" description = "An extremely fast 
Python linter and code formatter, written in Rust." optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.3.5-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:aef5bd3b89e657007e1be6b16553c8813b221ff6d92c7526b7e0227450981eac"}, - {file = "ruff-0.3.5-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:89b1e92b3bd9fca249153a97d23f29bed3992cff414b222fcd361d763fc53f12"}, - {file = "ruff-0.3.5-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e55771559c89272c3ebab23326dc23e7f813e492052391fe7950c1a5a139d89"}, - {file = "ruff-0.3.5-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:dabc62195bf54b8a7876add6e789caae0268f34582333cda340497c886111c39"}, - {file = "ruff-0.3.5-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3a05f3793ba25f194f395578579c546ca5d83e0195f992edc32e5907d142bfa3"}, - {file = "ruff-0.3.5-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:dfd3504e881082959b4160ab02f7a205f0fadc0a9619cc481982b6837b2fd4c0"}, - {file = "ruff-0.3.5-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:87258e0d4b04046cf1d6cc1c56fadbf7a880cc3de1f7294938e923234cf9e498"}, - {file = "ruff-0.3.5-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:712e71283fc7d9f95047ed5f793bc019b0b0a29849b14664a60fd66c23b96da1"}, - {file = "ruff-0.3.5-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a532a90b4a18d3f722c124c513ffb5e5eaff0cc4f6d3aa4bda38e691b8600c9f"}, - {file = "ruff-0.3.5-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:122de171a147c76ada00f76df533b54676f6e321e61bd8656ae54be326c10296"}, - {file = "ruff-0.3.5-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:d80a6b18a6c3b6ed25b71b05eba183f37d9bc8b16ace9e3d700997f00b74660b"}, - {file = "ruff-0.3.5-py3-none-musllinux_1_2_i686.whl", hash = "sha256:a7b6e63194c68bca8e71f81de30cfa6f58ff70393cf45aab4c20f158227d5936"}, - {file = 
"ruff-0.3.5-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:a759d33a20c72f2dfa54dae6e85e1225b8e302e8ac655773aff22e542a300985"}, - {file = "ruff-0.3.5-py3-none-win32.whl", hash = "sha256:9d8605aa990045517c911726d21293ef4baa64f87265896e491a05461cae078d"}, - {file = "ruff-0.3.5-py3-none-win_amd64.whl", hash = "sha256:dc56bb16a63c1303bd47563c60482a1512721053d93231cf7e9e1c6954395a0e"}, - {file = "ruff-0.3.5-py3-none-win_arm64.whl", hash = "sha256:faeeae9905446b975dcf6d4499dc93439b131f1443ee264055c5716dd947af55"}, - {file = "ruff-0.3.5.tar.gz", hash = "sha256:a067daaeb1dc2baf9b82a32dae67d154d95212080c80435eb052d95da647763d"}, + {file = "ruff-0.3.7-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:0e8377cccb2f07abd25e84fc5b2cbe48eeb0fea9f1719cad7caedb061d70e5ce"}, + {file = "ruff-0.3.7-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:15a4d1cc1e64e556fa0d67bfd388fed416b7f3b26d5d1c3e7d192c897e39ba4b"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d28bdf3d7dc71dd46929fafeec98ba89b7c3550c3f0978e36389b5631b793663"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:379b67d4f49774ba679593b232dcd90d9e10f04d96e3c8ce4a28037ae473f7bb"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c060aea8ad5ef21cdfbbe05475ab5104ce7827b639a78dd55383a6e9895b7c51"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:ebf8f615dde968272d70502c083ebf963b6781aacd3079081e03b32adfe4d58a"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d48098bd8f5c38897b03604f5428901b65e3c97d40b3952e38637b5404b739a2"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:da8a4fda219bf9024692b1bc68c9cff4b80507879ada8769dc7e985755d662ea"}, + {file = 
"ruff-0.3.7-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c44e0149f1d8b48c4d5c33d88c677a4aa22fd09b1683d6a7ff55b816b5d074f"}, + {file = "ruff-0.3.7-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:3050ec0af72b709a62ecc2aca941b9cd479a7bf2b36cc4562f0033d688e44fa1"}, + {file = "ruff-0.3.7-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:a29cc38e4c1ab00da18a3f6777f8b50099d73326981bb7d182e54a9a21bb4ff7"}, + {file = "ruff-0.3.7-py3-none-musllinux_1_2_i686.whl", hash = "sha256:5b15cc59c19edca917f51b1956637db47e200b0fc5e6e1878233d3a938384b0b"}, + {file = "ruff-0.3.7-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:e491045781b1e38b72c91247cf4634f040f8d0cb3e6d3d64d38dcf43616650b4"}, + {file = "ruff-0.3.7-py3-none-win32.whl", hash = "sha256:bc931de87593d64fad3a22e201e55ad76271f1d5bfc44e1a1887edd0903c7d9f"}, + {file = "ruff-0.3.7-py3-none-win_amd64.whl", hash = "sha256:5ef0e501e1e39f35e03c2acb1d1238c595b8bb36cf7a170e7c1df1b73da00e74"}, + {file = "ruff-0.3.7-py3-none-win_arm64.whl", hash = "sha256:789e144f6dc7019d1f92a812891c645274ed08af6037d11fc65fcbc183b7d59f"}, + {file = "ruff-0.3.7.tar.gz", hash = "sha256:d5c1aebee5162c2226784800ae031f660c350e7a3402c4d1f8ea4e97e232e3ba"}, ] [[package]] @@ -3982,18 +3982,18 @@ paramiko = "*" [[package]] name = "setuptools" -version = "69.2.0" +version = "69.5.1" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, - {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, + {file = "setuptools-69.5.1-py3-none-any.whl", hash = "sha256:c636ac361bc47580504644275c9ad802c50415c7522212252c033bd15f301f32"}, + {file = "setuptools-69.5.1.tar.gz", hash = "sha256:6c1fccdac05a97e598fb0ae3bbed5904ccb317337a51139dcd51453611bbb987"}, ] 
[package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] @@ -4046,13 +4046,13 @@ files = [ [[package]] name = "social-auth-app-django" -version = "5.2.0" +version = "5.4.0" description = 
"Python Social Authentication, Django integration." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "social-auth-app-django-5.2.0.tar.gz", hash = "sha256:4a5dae406f3874b4003708ff120c02cb1a4c8eeead56cd163646347309fcd0f8"}, - {file = "social_auth_app_django-5.2.0-py3-none-any.whl", hash = "sha256:0347ca4cd23ea9d15a665da9d22950552fb66b95600e6c2ebae38ca883b3a4ed"}, + {file = "social-auth-app-django-5.4.0.tar.gz", hash = "sha256:09ac02a063cb313eed5e9ef2f9ac4477c8bf5bbd685925ff3aba43f9072f1bbb"}, + {file = "social_auth_app_django-5.4.0-py3-none-any.whl", hash = "sha256:28c65b2e2092f30cdb3cf912eeaa6988b49fdf4001b29bd89e683673d700a38e"}, ] [package.dependencies] @@ -4087,19 +4087,18 @@ saml = ["python3-saml (>=1.5.0)"] [[package]] name = "sqlparse" -version = "0.4.4" +version = "0.5.0" description = "A non-validating SQL parser." optional = false -python-versions = ">=3.5" +python-versions = ">=3.8" files = [ - {file = "sqlparse-0.4.4-py3-none-any.whl", hash = "sha256:5430a4fe2ac7d0f93e66f1efc6e1338a41884b7ddf2a350cedd20ccc4d9d28f3"}, - {file = "sqlparse-0.4.4.tar.gz", hash = "sha256:d446183e84b8349fa3061f0fe7f06ca94ba65b426946ffebe6e3e8295332420c"}, + {file = "sqlparse-0.5.0-py3-none-any.whl", hash = "sha256:c204494cd97479d0e39f28c93d46c0b2d5959c7b9ab904762ea6c7af211c8663"}, + {file = "sqlparse-0.5.0.tar.gz", hash = "sha256:714d0a4932c059d16189f58ef5411ec2287a4360f17cdd0edd2d09d4c5087c93"}, ] [package.extras] -dev = ["build", "flake8"] +dev = ["build", "hatch"] doc = ["sphinx"] -test = ["pytest", "pytest-cov"] [[package]] name = "stack-data" diff --git a/pyproject.toml b/pyproject.toml index 3eece028..c0a95af7 100755 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "nautobot-device-onboarding" -version = "3.0.2a11" +version = "3.0.2a12" description = "A app for Nautobot to easily onboard new devices." 
authors = ["Network to Code, LLC "] license = "Apache-2.0" From af351fe2deda87ab33a541329e0d7efde90fc6db Mon Sep 17 00:00:00 2001 From: Jeff Kala Date: Thu, 18 Apr 2024 13:36:26 -0500 Subject: [PATCH 222/225] fix link in docs --- docs/dev/extending.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/dev/extending.md b/docs/dev/extending.md index 452ffac1..b4b09ebd 100644 --- a/docs/dev/extending.md +++ b/docs/dev/extending.md @@ -19,4 +19,4 @@ Extending the platform support for the SSoT specific jobs should be accomplished New platform support should be simplified in this framework, by providing a YAML file. -The format of these YAML files are and how to extend this application is covered in [App YAML Overrides](./app_yaml_overrides.md). +The format of these YAML files are and how to extend this application is covered in [App YAML Overrides](../user/app_yaml_overrides.md). From 438aabb4980926d0edbe6ded59e24e6a18992f04 Mon Sep 17 00:00:00 2001 From: David Cates Date: Thu, 18 Apr 2024 11:45:53 -0700 Subject: [PATCH 223/225] update mkdocs --- mkdocs.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mkdocs.yml b/mkdocs.yml index ed8a849f..13a433d1 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -120,7 +120,7 @@ nav: - v1.0: "admin/release_notes/version_1.0.md" - Developer Guide: - Extending the App: "dev/extending.md" - - YAML Overrides: "dev/app_yaml_overrides.md" + - YAML Overrides: "user/app_yaml_overrides.md" - Onboarding Extensions: "dev/onboarding_extensions.md" - Contributing to the App: "dev/contributing.md" - Development Environment: "dev/dev_environment.md" From ab1a711fef365ae1fa0c5cfaa5d356d90e8d1ff4 Mon Sep 17 00:00:00 2001 From: Susan Hooks Date: Thu, 18 Apr 2024 18:52:53 +0000 Subject: [PATCH 224/225] removed extra rd config --- .../command_mappers/cisco_nxos.yml | 7 +--- .../nornir_plays/formatter.py | 40 ++++--------------- 2 files changed, 9 insertions(+), 38 deletions(-) diff --git 
a/nautobot_device_onboarding/command_mappers/cisco_nxos.yml b/nautobot_device_onboarding/command_mappers/cisco_nxos.yml index 78ac56f6..c672d648 100755 --- a/nautobot_device_onboarding/command_mappers/cisco_nxos.yml +++ b/nautobot_device_onboarding/command_mappers/cisco_nxos.yml @@ -81,12 +81,7 @@ network_importer: commands: - command: "show vrf interface" use_textfsm: true - jpath: "[*].{interface: interface, name: name, id: id}" - vrf_rds: - commands: - - command: "show vrf detail" - use_textfsm: true - jpath: "[*].{id: id, name: name, default_rd: default_rd}" + jpath: "[*].{interface: interface, name: name}" vlans: commands: - command: "show vlan" diff --git a/nautobot_device_onboarding/nornir_plays/formatter.py b/nautobot_device_onboarding/nornir_plays/formatter.py index c66f98d1..00e93bbb 100755 --- a/nautobot_device_onboarding/nornir_plays/formatter.py +++ b/nautobot_device_onboarding/nornir_plays/formatter.py @@ -269,14 +269,9 @@ def format_ios_results(device): if canonical_name.startswith("VLAN"): canonical_name = canonical_name.replace("VLAN", "Vlan", 1) interface_dict.setdefault(canonical_name, {}) - if vrf["default_rd"] == "": - interface_dict[canonical_name]["vrf"] = { - "name": vrf["name"], - } - else: - interface_dict[canonical_name]["vrf"] = { - "name": vrf["name"], - } + interface_dict[canonical_name]["vrf"] = { + "name": vrf["name"], + } except KeyError: print(f"Error: VRF configuration on interface {interface} not as expected.") continue @@ -309,7 +304,6 @@ def format_nxos_results(device): macs = device.get("mac_address", []) descriptions = device.get("description", []) link_statuses = device.get("link_status", []) - vrfs_rd = device.get("vrf_rds", []) vrfs_interfaces = device.get("vrf_interfaces", []) vlans = device.get("vlans", []) interface_vlans = device.get("interface_vlans", []) @@ -325,11 +319,8 @@ def format_nxos_results(device): link_status_list = ensure_list(link_statuses) vlan_list = ensure_list(vlans) interface_vlan_list = 
ensure_list(interface_vlans) + # vrfs_interfaces_list = ensure_list(vrfs_interfaces) - if vrfs_rd is None: - vrfs_rds = [] - else: - vrfs_rds = ensure_list(vrfs_rd) if vrfs_interfaces is None: vrfs_interfaces = [] else: @@ -443,25 +434,11 @@ def format_nxos_results(device): for interface_name, interface_info in interface_dict.items(): interface_list.append({canonical_interface_name(interface_name): interface_info}) - # Populate vrf_dict from commands and add to interface_dict - vrf_dict = {vrf["id"]: vrf for vrf in vrfs_rds} - - for interface in vrfs_interfaces: - vrf_id = interface["id"] - if "interfaces" not in vrf_dict[vrf_id]: - vrf_dict[vrf_id]["interfaces"] = [] - vrf_dict[vrf_id]["interfaces"].append(interface["interface"]) - - vrf_list = list(vrf_dict.values()) - for vrf in vrf_list: + for vrf in vrfs_interfaces: try: - if "interfaces" in vrf: - for interface in vrf["interfaces"]: - canonical_name = canonical_interface_name(interface) - if canonical_name.startswith("VLAN"): - canonical_name = canonical_name.replace("VLAN", "Vlan", 1) - interface_dict.setdefault(canonical_name, {}) - interface_dict[canonical_name]["vrf"] = {"name": vrf["name"]} + canonical_name = canonical_interface_name(vrf["interface"]) + interface_dict.setdefault(canonical_name, {}) + interface_dict[canonical_name]["vrf"] = {"name": vrf["name"]} except KeyError: print(f"Error: VRF configuration on interface {interface} not as expected.") continue @@ -477,7 +454,6 @@ def format_nxos_results(device): del device["description"] del device["link_status"] del device["mode"] - del device["vrf_rds"] del device["vrf_interfaces"] del device["vlans"] del device["interface_vlans"] From 38e330716a4df1c2ec20e0f9cb4a1fb875a24cf3 Mon Sep 17 00:00:00 2001 From: Susan Hooks Date: Thu, 18 Apr 2024 18:54:47 +0000 Subject: [PATCH 225/225] cleanup --- nautobot_device_onboarding/nornir_plays/formatter.py | 1 - 1 file changed, 1 deletion(-) diff --git a/nautobot_device_onboarding/nornir_plays/formatter.py 
b/nautobot_device_onboarding/nornir_plays/formatter.py index 00e93bbb..a1fe5cc2 100755 --- a/nautobot_device_onboarding/nornir_plays/formatter.py +++ b/nautobot_device_onboarding/nornir_plays/formatter.py @@ -319,7 +319,6 @@ def format_nxos_results(device): link_status_list = ensure_list(link_statuses) vlan_list = ensure_list(vlans) interface_vlan_list = ensure_list(interface_vlans) - # vrfs_interfaces_list = ensure_list(vrfs_interfaces) if vrfs_interfaces is None: vrfs_interfaces = []