diff --git a/.docker/configuration/configuration.py b/.docker/configuration/configuration.py
index c70590babe61756d7ad6bb8141c0582c6e93bee8..52d7c63b6ed02ed66346059d4b921f5fd49a3d5c 100644
--- a/.docker/configuration/configuration.py
+++ b/.docker/configuration/configuration.py
@@ -86,6 +86,9 @@ REDIS = {
     'tasks': {
         'HOST': environ.get('REDIS_HOST', 'localhost'),
         'PORT': _environ_get_and_map('REDIS_PORT', 6379, _AS_INT),
+        'SENTINELS': [tuple(uri.split(':')) for uri in _environ_get_and_map('REDIS_SENTINELS', '', _AS_LIST) if uri != ''],
+        'SENTINEL_SERVICE': environ.get('REDIS_SENTINEL_SERVICE', 'default'),
+        'SENTINEL_TIMEOUT': _environ_get_and_map('REDIS_SENTINEL_TIMEOUT', 10, _AS_INT),
         'USERNAME': environ.get('REDIS_USERNAME', ''),
         'PASSWORD': _read_secret('redis_password', environ.get('REDIS_PASSWORD', '')),
         'DATABASE': _environ_get_and_map('REDIS_DATABASE', 0, _AS_INT),
@@ -95,6 +98,8 @@ REDIS = {
     'caching': {
         'HOST': environ.get('REDIS_CACHE_HOST', environ.get('REDIS_HOST', 'localhost')),
         'PORT': _environ_get_and_map('REDIS_CACHE_PORT', environ.get('REDIS_PORT', '6379'), _AS_INT),
+        'SENTINELS': [tuple(uri.split(':')) for uri in _environ_get_and_map('REDIS_CACHE_SENTINELS', '', _AS_LIST) if uri != ''],
+        'SENTINEL_SERVICE': environ.get('REDIS_CACHE_SENTINEL_SERVICE', environ.get('REDIS_SENTINEL_SERVICE', 'default')),
         'USERNAME': environ.get('REDIS_CACHE_USERNAME', environ.get('REDIS_USERNAME', '')),
         'PASSWORD': _read_secret('redis_cache_password', environ.get('REDIS_CACHE_PASSWORD', environ.get('REDIS_PASSWORD', ''))),
         'DATABASE': _environ_get_and_map('REDIS_CACHE_DATABASE', '1', _AS_INT),
@@ -183,15 +188,22 @@ EMAIL = {
 if 'ENFORCE_GLOBAL_UNIQUE' in environ:
     ENFORCE_GLOBAL_UNIQUE = _environ_get_and_map('ENFORCE_GLOBAL_UNIQUE', None, _AS_BOOL)
 
+# By default, netbox sends census reporting data using a single HTTP request each time a worker starts.
+# This data enables the project maintainers to estimate how many NetBox deployments exist and track the adoption of new versions over time.
+# The only data reported by this function are the NetBox version, Python version, and a pseudorandom unique identifier.
+# To opt out of census reporting, set CENSUS_REPORTING_ENABLED to False.
+if 'CENSUS_REPORTING_ENABLED' in environ:
+    CENSUS_REPORTING_ENABLED = _environ_get_and_map('CENSUS_REPORTING_ENABLED', None, _AS_BOOL)
+
 # Exempt certain models from the enforcement of view permissions. Models listed here will be viewable by all users and
 # by anonymous users. List models in the form `<app>.<model>`. Add '*' to this list to exempt all models.
 EXEMPT_VIEW_PERMISSIONS = _environ_get_and_map('EXEMPT_VIEW_PERMISSIONS', '', _AS_LIST)
 
 # HTTP proxies NetBox should use when sending outbound HTTP requests (e.g. for webhooks).
-# HTTP_PROXIES = {
-#     'http': 'http://10.10.1.10:3128',
-#     'https': 'http://10.10.1.10:1080',
-# }
+HTTP_PROXIES = {
+    'http': environ.get('HTTP_PROXY', None),
+    'https': environ.get('HTTPS_PROXY', None),
+}
 
 # IP addresses recognized as internal to the system. The debugging toolbar will be available only to clients accessing
 # NetBox from an internal IP.
@@ -221,9 +233,9 @@ LOGGING = {
 # authenticated to NetBox indefinitely.
 LOGIN_PERSISTENCE = _environ_get_and_map('LOGIN_PERSISTENCE', 'False', _AS_BOOL)
 
-# Setting this to True will permit only authenticated users to access any part of NetBox. By default, anonymous users
-# are permitted to access most data in NetBox (excluding secrets) but not make any changes.
-LOGIN_REQUIRED = _environ_get_and_map('LOGIN_REQUIRED', 'False', _AS_BOOL)
+# When enabled, only authenticated users are permitted to access any part of NetBox.
+# Disabling this will allow unauthenticated users to access most areas of NetBox (but not make any changes).
+LOGIN_REQUIRED = _environ_get_and_map('LOGIN_REQUIRED', 'True', _AS_BOOL)
 
 # The length of time (in seconds) for which a user will remain logged into the web UI before being prompted to
 # re-authenticate. (Default: 1209600 [14 days])
@@ -286,12 +298,23 @@ if 'RACK_ELEVATION_DEFAULT_UNIT_WIDTH' in environ:
     RACK_ELEVATION_DEFAULT_UNIT_WIDTH = _environ_get_and_map('RACK_ELEVATION_DEFAULT_UNIT_WIDTH', None, _AS_INT)
 
 # Remote authentication support
-REMOTE_AUTH_ENABLED = _environ_get_and_map('REMOTE_AUTH_ENABLED', 'False', _AS_BOOL)
-REMOTE_AUTH_BACKEND = _environ_get_and_map('REMOTE_AUTH_BACKEND', 'netbox.authentication.RemoteUserBackend', _AS_LIST)
-REMOTE_AUTH_HEADER = environ.get('REMOTE_AUTH_HEADER', 'HTTP_REMOTE_USER')
+REMOTE_AUTH_AUTO_CREATE_GROUPS = _environ_get_and_map('REMOTE_AUTH_AUTO_CREATE_GROUPS', 'False', _AS_BOOL)
 REMOTE_AUTH_AUTO_CREATE_USER = _environ_get_and_map('REMOTE_AUTH_AUTO_CREATE_USER', 'False', _AS_BOOL)
+REMOTE_AUTH_BACKEND = _environ_get_and_map('REMOTE_AUTH_BACKEND', 'netbox.authentication.RemoteUserBackend', _AS_LIST)
 REMOTE_AUTH_DEFAULT_GROUPS = _environ_get_and_map('REMOTE_AUTH_DEFAULT_GROUPS', '', _AS_LIST)
-# REMOTE_AUTH_DEFAULT_PERMISSIONS = {}
+# REMOTE_AUTH_DEFAULT_PERMISSIONS = {} # dicts can't be configured via environment variables. See extra.py instead.
+REMOTE_AUTH_ENABLED = _environ_get_and_map('REMOTE_AUTH_ENABLED', 'False', _AS_BOOL)
+REMOTE_AUTH_GROUP_HEADER = _environ_get_and_map('REMOTE_AUTH_GROUP_HEADER', 'HTTP_REMOTE_USER_GROUP')
+REMOTE_AUTH_GROUP_SEPARATOR = _environ_get_and_map('REMOTE_AUTH_GROUP_SEPARATOR', '|')
+REMOTE_AUTH_GROUP_SYNC_ENABLED = _environ_get_and_map('REMOTE_AUTH_GROUP_SYNC_ENABLED', 'False', _AS_BOOL)
+REMOTE_AUTH_HEADER = environ.get('REMOTE_AUTH_HEADER', 'HTTP_REMOTE_USER')
+REMOTE_AUTH_USER_EMAIL = environ.get('REMOTE_AUTH_USER_EMAIL', 'HTTP_REMOTE_USER_EMAIL')
+REMOTE_AUTH_USER_FIRST_NAME = environ.get('REMOTE_AUTH_USER_FIRST_NAME', 'HTTP_REMOTE_USER_FIRST_NAME')
+REMOTE_AUTH_USER_LAST_NAME = environ.get('REMOTE_AUTH_USER_LAST_NAME', 'HTTP_REMOTE_USER_LAST_NAME')
+REMOTE_AUTH_SUPERUSER_GROUPS = _environ_get_and_map('REMOTE_AUTH_SUPERUSER_GROUPS', '', _AS_LIST)
+REMOTE_AUTH_SUPERUSERS = _environ_get_and_map('REMOTE_AUTH_SUPERUSERS', '', _AS_LIST)
+REMOTE_AUTH_STAFF_GROUPS = _environ_get_and_map('REMOTE_AUTH_STAFF_GROUPS', '', _AS_LIST)
+REMOTE_AUTH_STAFF_USERS = _environ_get_and_map('REMOTE_AUTH_STAFF_USERS', '', _AS_LIST)
 
 # This repository is used to check whether there is a new release of NetBox available. Set to None to disable the
 # version check or use the URL below to check for release in the official NetBox repository.
@@ -312,6 +335,23 @@ CSRF_TRUSTED_ORIGINS = _environ_get_and_map('CSRF_TRUSTED_ORIGINS', '', _AS_LIST
 # The name to use for the session cookie.
 SESSION_COOKIE_NAME = environ.get('SESSION_COOKIE_NAME', 'sessionid')
 
+# If true, the `includeSubDomains` directive will be included in the HTTP Strict Transport Security (HSTS) header.
+# This directive instructs the browser to apply the HSTS policy to all subdomains of the current domain.
+SECURE_HSTS_INCLUDE_SUBDOMAINS = _environ_get_and_map('SECURE_HSTS_INCLUDE_SUBDOMAINS', 'False', _AS_BOOL)
+
+# If true, the `preload` directive will be included in the HTTP Strict Transport Security (HSTS) header.
+# This directive instructs the browser to preload the site in HTTPS. Browsers that use the HSTS preload list will force the
+# site to be accessed via HTTPS even if the user types HTTP in the address bar.
+SECURE_HSTS_PRELOAD = _environ_get_and_map('SECURE_HSTS_PRELOAD', 'False', _AS_BOOL)
+
+# If set to a non-zero integer value, the SecurityMiddleware sets the HTTP Strict Transport Security (HSTS) header on all
+# responses that do not already have it. This will instruct the browser that the website must be accessed via HTTPS,
+# blocking any HTTP request.
+SECURE_HSTS_SECONDS = _environ_get_and_map('SECURE_HSTS_SECONDS', 0, _AS_INT)
+
+# If true, all non-HTTPS requests will be automatically redirected to use HTTPS.
+SECURE_SSL_REDIRECT = _environ_get_and_map('SECURE_SSL_REDIRECT', 'False', _AS_BOOL)
+
 # By default, NetBox will store session data in the database. Alternatively, a file path can be specified here to use
 # local file storage instead. (This can be useful for enabling authentication on a standby instance with read-only
 # database access.) Note that the user as which NetBox runs must have read and write permissions to this path.
@@ -320,11 +360,5 @@ SESSION_FILE_PATH = environ.get('SESSION_FILE_PATH', environ.get('SESSIONS_ROOT'
 # Time zone (default: UTC)
 TIME_ZONE = environ.get('TIME_ZONE', 'UTC')
 
-# Date/time formatting. See the following link for supported formats:
-# https://docs.djangoproject.com/en/stable/ref/templates/builtins/#date
-DATE_FORMAT = environ.get('DATE_FORMAT', 'N j, Y')
-SHORT_DATE_FORMAT = environ.get('SHORT_DATE_FORMAT', 'Y-m-d')
-TIME_FORMAT = environ.get('TIME_FORMAT', 'g:i a')
-SHORT_TIME_FORMAT = environ.get('SHORT_TIME_FORMAT', 'H:i:s')
-DATETIME_FORMAT = environ.get('DATETIME_FORMAT', 'N j, Y g:i a')
-SHORT_DATETIME_FORMAT = environ.get('SHORT_DATETIME_FORMAT', 'Y-m-d H:i')
+# If true disables miscellaneous functionality which depends on access to the Internet.
+ISOLATED_DEPLOYMENT = _environ_get_and_map('ISOLATED_DEPLOYMENT', 'False', _AS_BOOL)
diff --git a/.docker/docker-compose.yml b/.docker/docker-compose.yml
index 299a5fc1e11956c41a917884bfa22084e6a868b4..80d8157025e89dd447c89c8930a4c7d124b897a6 100644
--- a/.docker/docker-compose.yml
+++ b/.docker/docker-compose.yml
@@ -1,12 +1,11 @@
 version: '3.4'
 services:
   netbox: &netbox
-    image: docker.io/netboxcommunity/netbox:v3.7.8
+    image: docker.io/netboxcommunity/netbox:v4.2-3.2.0
     depends_on:
       - postgres
       - redis
       - redis-cache
-      - netbox-worker
     environment:
       CORS_ORIGIN_ALLOW_ALL: "true"
       DB_HOST: postgres
@@ -28,46 +27,49 @@ services:
       MAX_PAGE_SIZE: "1000"
       MEDIA_ROOT: /opt/netbox/netbox/media
       METRICS_ENABLED: "false"
-      NAPALM_PASSWORD: ""
-      NAPALM_TIMEOUT: "10"
-      NAPALM_USERNAME: ""
       REDIS_CACHE_DATABASE: "1"
       REDIS_CACHE_HOST: redis-cache
+      REDIS_CACHE_INSECURE_SKIP_TLS_VERIFY: "false"
       REDIS_CACHE_PASSWORD: eeCae8ai0hua4koK
       REDIS_CACHE_SSL: "false"
       REDIS_DATABASE: "0"
       REDIS_HOST: redis
+      REDIS_INSECURE_SKIP_TLS_VERIFY: "false"
       REDIS_PASSWORD: Choopike2aeBee1f
       REDIS_SSL: "false"
       RELEASE_CHECK_URL: https://api.github.com/repos/netbox-community/netbox/releases
       SECRET_KEY: ohL4EipaHaeng1ohrieh7xeeghoun7Qualaesoor4ahr2Daup4
-      SKIP_STARTUP_SCRIPTS: "false"
       SKIP_SUPERUSER: "false"
       SUPERUSER_API_TOKEN: 0123456789abcdef0123456789abcdef01234567
       SUPERUSER_EMAIL: admin@example.com
       SUPERUSER_NAME: admin
       SUPERUSER_PASSWORD: admin
       WEBHOOKS_ENABLED: "true"
-    user: '101'
     ports:
       - 8080:8080
+    user: "unit:root"
     volumes:
       - ./configuration:/etc/netbox/config:z,ro
-      - ../netbox_awx_plugin:/opt/netbox/venv/lib/python3.11/site-packages/netbox_awx_plugin
+      - ../netbox_awx_plugin:/opt/netbox/venv/lib/python3.12/site-packages/netbox_awx_plugin
 
   netbox-worker:
     <<: *netbox
     ports: []
     depends_on:
-      - redis
-    entrypoint:
+      - netbox
+    command:
       - /opt/netbox/venv/bin/python
       - /opt/netbox/netbox/manage.py
-    command:
       - rqworker
 
   postgres:
-    image: postgres:13-alpine
+    image: docker.io/postgres:17-alpine
+    healthcheck:
+      test: pg_isready -q -t 2 -d $$POSTGRES_DB -U $$POSTGRES_USER
+      start_period: 20s
+      timeout: 30s
+      interval: 10s
+      retries: 5
     environment:
       POSTGRES_DB: netbox
       POSTGRES_PASSWORD: Oquoo9ohwohpahy5
@@ -76,7 +78,7 @@ services:
       - netbox_database:/var/lib/postgresql/data
 
   redis:
-    image: redis:6-alpine
+    image: redis:7-alpine
     command:
       - sh
       - -c
@@ -85,7 +87,7 @@ services:
       REDIS_PASSWORD: Choopike2aeBee1f
 
   redis-cache:
-    image: redis:6-alpine
+    image: redis:7-alpine
     command:
       - sh
       - -c # this is to evaluate the $REDIS_PASSWORD from the env
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 96be85102721f563365ece6e9072f3e2f1961e91..de4f1d6eaf6556a4fa11116dd3c3f6224ef25e81 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -12,7 +12,7 @@ default:
 
 build-package:
   stage: build
-  image: python:3.11
+  image: python:3.12
   before_script:
     - python -m pip install build --user
   script:
@@ -24,7 +24,7 @@ build-package:
 
 test:
   stage: test
-  image: python:3.11
+  image: python:3.12
   services:
     - name: redis
   variables:
@@ -37,7 +37,7 @@ test:
     HEALTHCHECK_TCP_PORT: "5432"
   before_script:
     - pip install .
-    - git clone --depth 1 --branch v3.7.8 --single-branch https://github.com/netbox-community/netbox.git
+    - git clone --depth 1 --branch v4.2.3 --single-branch https://github.com/netbox-community/netbox.git
     - pip install --upgrade --requirement netbox/requirements.txt
     - ln -s $(pwd)/tests/configuration.py netbox/netbox/netbox/configuration.py
   script:
@@ -46,7 +46,7 @@ test:
 
 release-package:
   stage: release
-  image: python:3.11
+  image: python:3.12
   before_script:
     - python -m pip install twine --user
   script:
diff --git a/netbox_awx_plugin/__init__.py b/netbox_awx_plugin/__init__.py
index 6eb8b546d8cb3e6fd281d5401ec513bb7ccd96aa..06d85f767b032c06263007ee7b15bd0e4d89dadf 100644
--- a/netbox_awx_plugin/__init__.py
+++ b/netbox_awx_plugin/__init__.py
@@ -1,11 +1,11 @@
 """Top-level package for NetBox AWX Plugin."""
 
-__author__ = """Anders Harrisson"""
+__author__ = "Anders Harrisson"
 __email__ = "anders.harrisson@gmail.com"
 __version__ = "0.3.0"
 
 
-from extras.plugins import PluginConfig
+from netbox.plugins import PluginConfig
 
 
 class AWXConfig(PluginConfig):
diff --git a/netbox_awx_plugin/api/serializers.py b/netbox_awx_plugin/api/serializers.py
index b31c04fdc922e51f2d03a8c0def20aea0fa162e5..73593834eb1f9bd49638a35fa2d87fc9442b1dd7 100644
--- a/netbox_awx_plugin/api/serializers.py
+++ b/netbox_awx_plugin/api/serializers.py
@@ -1,9 +1,14 @@
+from rest_framework import serializers
+
 from netbox.api.serializers import NetBoxModelSerializer
 
 from ..models import AWX, AWXInventory
 
-
 class AWXSerializer(NetBoxModelSerializer):
+    url = serializers.HyperlinkedIdentityField(
+        view_name='plugins-api:netbox_awx_plugin-api:awx-detail'
+    )
+
     class Meta:
         model = AWX
         fields = (
@@ -17,18 +22,32 @@ class AWXSerializer(NetBoxModelSerializer):
             "created",
             "last_updated",
         )
+        brief_fields = (
+            "id",
+            "display",
+            "name",
+            "url",
+        )
 
 
 class AWXInventorySerializer(NetBoxModelSerializer):
+    url = serializers.HyperlinkedIdentityField(
+        view_name='plugins-api:netbox_awx_plugin-api:awxinventory-detail'
+    )
+
     class Meta:
         model = AWXInventory
         fields = (
             "id",
             "display",
-            "awx",
             "inventory_id",
             "tags",
             "custom_fields",
             "created",
             "last_updated",
         )
+        brief_fields = (
+            "id",
+            "display",
+            "inventory_id",
+        )
diff --git a/netbox_awx_plugin/api/urls.py b/netbox_awx_plugin/api/urls.py
new file mode 100644
index 0000000000000000000000000000000000000000..02f571f91aab861cde8f561fe387b341e2a544a7
--- /dev/null
+++ b/netbox_awx_plugin/api/urls.py
@@ -0,0 +1,10 @@
+from netbox.api.routers import NetBoxRouter
+from . import views
+
+app_name = 'netbox_awx_plugin'
+
+router = NetBoxRouter()
+router.register('awxs', views.AWXViewSet)
+router.register('awxinventories', views.AWXInventoryViewSet)
+
+urlpatterns = router.urls
diff --git a/netbox_awx_plugin/api/views.py b/netbox_awx_plugin/api/views.py
new file mode 100644
index 0000000000000000000000000000000000000000..04d6ebd15f86ce85e2421d9eeff527fdd7dc6ce4
--- /dev/null
+++ b/netbox_awx_plugin/api/views.py
@@ -0,0 +1,12 @@
+from netbox.api.viewsets import NetBoxModelViewSet
+
+from .. import filtersets, models
+from .serializers import AWXSerializer, AWXInventorySerializer
+
+class AWXViewSet(NetBoxModelViewSet):
+    queryset = models.AWX.objects.prefetch_related('tags')
+    serializer_class = AWXSerializer
+
+class AWXInventoryViewSet(NetBoxModelViewSet):
+    queryset = models.AWXInventory.objects.prefetch_related('tags')
+    serializer_class = AWXInventorySerializer
diff --git a/netbox_awx_plugin/models.py b/netbox_awx_plugin/models.py
index 9473c6009cd723e642e8d20b97a88a403031018d..03533b3d875ab4b6b90b9725fd98098ee1f2e3ee 100644
--- a/netbox_awx_plugin/models.py
+++ b/netbox_awx_plugin/models.py
@@ -14,6 +14,8 @@ class AWX(NetBoxModel):
     token = models.CharField(max_length=200, verbose_name=("Token"))
 
     class Meta:
+        verbose_name = ("AWX")
+        verbose_name_plural = ("AWXs")
         ordering = ("name", "url", "token")
 
     def __str__(self):
@@ -37,6 +39,8 @@ class AWXInventory(NetBoxModel):
     enabled = models.BooleanField(default=True)
 
     class Meta:
+        verbose_name = ("AWX Inventory")
+        verbose_name_plural = ("AWX Inventories")
         ordering = ("awx", "inventory_id", "enabled")
 
     def get_absolute_url(self):
diff --git a/netbox_awx_plugin/navigation.py b/netbox_awx_plugin/navigation.py
index 923b28cc83c32c8085f4851a506190e68785a8b8..740dd61090bb1ebcf11e43c3da99e2fa732c2d21 100644
--- a/netbox_awx_plugin/navigation.py
+++ b/netbox_awx_plugin/navigation.py
@@ -1,12 +1,10 @@
-from extras.plugins import PluginMenuButton, PluginMenuItem
-from utilities.choices import ButtonColorChoices
+from netbox.plugins import PluginMenuButton, PluginMenuItem
 
 plugin_buttons = [
     PluginMenuButton(
         link="plugins:netbox_awx_plugin:awx_add",
         title="Add",
         icon_class="mdi mdi-plus-thick",
-        color=ButtonColorChoices.GREEN,
         permissions=['netbox_awx_plugin.add_awx'],  # Add permission for adding AWX
     )
 ]
@@ -16,7 +14,6 @@ awxinventory_buttons = [
         link="plugins:netbox_awx_plugin:awxinventory_add",
         title="Add inventory",
         icon_class="mdi mdi-plus-thick",
-        color=ButtonColorChoices.GREEN,
         permissions=['netbox_awx_plugin.add_awxinventory'],  # Add permission for adding AWX Inventory
     )
 ]
diff --git a/netbox_awx_plugin/serializers.py b/netbox_awx_plugin/serializers.py
index f21e5c1858026734b0284f1a852d9c87f17cd3ca..d9567b64fbca72a7f21a79edf8dfc000d1c1760a 100644
--- a/netbox_awx_plugin/serializers.py
+++ b/netbox_awx_plugin/serializers.py
@@ -66,16 +66,27 @@ class PrefixSerializer(serializers.BaseSerializer):
 
 class InterfaceSerializer(serializers.BaseSerializer):
     def to_representation(self, instance):
+        mac_addresses = []
+        for mac_address in instance.mac_addresses.all():
+            serializer = MACAddressSerializer(mac_address)
+            mac_addresses.append(serializer.data)
         ip_addresses = []
         for ip in instance.ip_addresses.all():
            serializer = IPAddressSerializer(ip)
            ip_addresses.append(serializer.data)
         return {
             "name": instance.name,
-            "mac": str(instance.mac_address) if instance.mac_address else None,
+            "primary_mac_address": str(instance.primary_mac_address) if instance.primary_mac_address else None,
+            "mac_addresses": mac_addresses,
             "ip_addresses": ip_addresses
         }
 
 
+class MACAddressSerializer(serializers.BaseSerializer):
+    def to_representation(self, instance):
+        return {
+            "mac_address": str(instance.mac_address)
+        }
+
 class IPAddressSerializer(serializers.BaseSerializer):
     def to_representation(self, instance):
         return {
@@ -102,13 +113,18 @@ class DeviceSerializer(serializers.BaseSerializer):
 
 class VMInterfaceSerializer(serializers.BaseSerializer):
     def to_representation(self, instance):
+        mac_addresses = []
+        for mac_address in instance.mac_addresses.all():
+            serializer = MACAddressSerializer(mac_address)
+            mac_addresses.append(serializer.data)
         ip_addresses = []
         for ip in instance.ip_addresses.all():
             serializer = IPAddressSerializer(ip)
             ip_addresses.append(serializer.data)
         return {
             "name": instance.name,
-            "mac": str(instance.mac_address) if instance.mac_address else None,
+            "primary_mac_address": str(instance.primary_mac_address) if instance.primary_mac_address else None,
+            "mac_addresses": mac_addresses,
             "ip_addresses": ip_addresses
         }
 
diff --git a/netbox_awx_plugin/signals.py b/netbox_awx_plugin/signals.py
index 44a073e8bc11fca1a170a7d3582b02fcf5a8456c..b13d315f34abff26d436e96e6390524f5319a9c0 100644
--- a/netbox_awx_plugin/signals.py
+++ b/netbox_awx_plugin/signals.py
@@ -36,6 +36,9 @@ def process_inventory_task(task_name, sender, instance):
     for inventory in inventories:
         enqueue_task(task_name, inventory, sender, instance)
 
+def validate_host(instance):
+    return instance.primary_ip4 and instance.primary_ip4.dns_name
+
 
 @receiver(post_save, sender=Site)
 @receiver(post_save, sender=DeviceRole)
@@ -86,7 +89,7 @@ def handle_device_post_save(sender, instance, **kwargs):
     Synchronizes AWX hosts if the instance meets the IP and DNS requirements.
     """
     instance = sender.objects.select_related("primary_ip4").get(pk=instance.pk)
-    if instance.primary_ip4 and instance.primary_ip4.dns_name:
+    if validate_host(instance):
         logger.debug(f"Device/VM {instance.pk} meets IP/DNS criteria. Triggering host sync.")
         process_inventory_task("sync_host", sender, instance)
     else:
@@ -106,10 +109,10 @@ def handle_interface_post_save(sender, instance, **kwargs):
 
 @receiver(m2m_changed, sender=Device.tags.through)
 def handle_device_tag_assignment(sender, instance, action, reverse, model, pk_set, **kwargs):
-    if isinstance(instance, Device):
+    if isinstance(instance, Device) and validate_host(instance):
         process_inventory_task("sync_host", Device, instance)
 
 @receiver(m2m_changed, sender=VirtualMachine.tags.through)
 def handle_vm_tag_assignment(sender, instance, action, reverse, model, pk_set, **kwargs):
-    if isinstance(instance, VirtualMachine):
+    if isinstance(instance, VirtualMachine) and validate_host(instance):
         process_inventory_task("sync_host", VirtualMachine, instance)
diff --git a/netbox_awx_plugin/synchronization.py b/netbox_awx_plugin/synchronization.py
index 7cf1ee280abca4918ebd4c0307c3fd911384254b..42483d1242540f98ceee43fc6b222b6a88df6e84 100644
--- a/netbox_awx_plugin/synchronization.py
+++ b/netbox_awx_plugin/synchronization.py
@@ -4,6 +4,7 @@ from dcim.models import Device, DeviceRole, DeviceType, Site
 from ipam.models import Prefix
 from virtualization.models import VirtualMachine
 from extras.models import Tag
+from netbox.jobs import JobRunner
 from .models import AWXInventory
 from .serializers import (
     serializers_dict,
@@ -190,157 +191,159 @@ def delete_group(inventory, sender, instance):
     serializer = serializer_class(instance)
     inventory.delete_group(serializer.data["name"])
 
-def sync_all(job):
-    """
-    Performs a full synchronization of the AWX inventory, including host-group associations.
-    Optimized to check for existing groups and hosts before creating or updating them.
- """ - inventory = job.object - logger.info( - f"Performing full inventory sync for inventory {inventory.inventory_id}" - ) - job.start() - - # Collect all AWX groups and hosts - awx_groups = inventory.get_all_groups() - awx_hosts = inventory.get_all_hosts() - - # Synchronize groups - group_models = [Site, DeviceRole, DeviceType, Prefix, Tag] - netbox_group_names = set() - for model in group_models: - instances = model.objects.all() - for instance in instances: - serializer = serializers_dict[model](instance) - group_name = serializer.data["name"] - netbox_group_names.add(group_name) - - if group_name in awx_groups: - # Group exists in AWX, check if it needs updating - awx_group = awx_groups[group_name] - awx_group_serializer = AWXGroupSerializer(data=awx_group) - if awx_group_serializer.is_valid(): - awx_group_data = awx_group_serializer.validated_data - if awx_group_data != serializer.data: - inventory.update_group(awx_group["id"], serializer.data) - logger.info(f"Updated group {group_name} in AWX.") - else: - logger.error(f"Invalid data for awx_group_serializer: {awx_group_serializer.errors}") - else: - # Group does not exist in AWX, create it - awx_group = inventory.create_group(serializer.data) - if awx_group: - logger.info(f"Created group {group_name} in AWX.") - # Update local cache - awx_groups[group_name] = awx_group +class FullInventorySyncJob(JobRunner): + + class Meta: + name = "Full AWX Inventory sync" + + def run(self, *args, **kwargs): + """ + Performs a full synchronization of the AWX inventory, including host-group associations. + Optimized to check for existing groups and hosts before creating or updating them. + """ + inventory = kwargs['inventory'] + logger.info( + f"Performing full inventory sync for inventory {inventory.inventory_id}" + ) + + # Collect all AWX groups and hosts + awx_groups = inventory.get_all_groups() + awx_hosts = inventory.get_all_hosts() + + # Synchronize groups + group_models = [Site, DeviceRole, DeviceType, Prefix, Tag] + netbox_group_names = set() + for model in group_models: + instances = model.objects.all() + for instance in instances: + serializer = serializers_dict[model](instance) + group_name = serializer.data["name"] + netbox_group_names.add(group_name) + + if group_name in awx_groups: + # Group exists in AWX, check if it needs updating + awx_group = awx_groups[group_name] + awx_group_serializer = AWXGroupSerializer(data=awx_group) + if awx_group_serializer.is_valid(): + awx_group_data = awx_group_serializer.validated_data + if awx_group_data != serializer.data: + inventory.update_group(awx_group["id"], serializer.data) + logger.info(f"Updated group {group_name} in AWX.") + else: + logger.error(f"Invalid data for awx_group_serializer: {awx_group_serializer.errors}") else: - logger.error( - f"Failed to create group {group_name} in AWX." 
-                    )
-
-    awx_group_names = set(awx_groups.keys())
-    # Delete groups in AWX that are not in NetBox
-    groups_to_delete = awx_group_names - netbox_group_names
-    for group_name in groups_to_delete:
-        inventory.delete_group(group_name)
-        logger.info(f"Deleted group {group_name} from AWX as it no longer exists in NetBox.")
-
-    # Synchronize hosts
-    host_models = [Device, VirtualMachine]
-    netbox_host_names = set()
-    for model in host_models:
-        instances = model.objects.select_related(
-            "primary_ip4"
-        ).prefetch_related("interfaces__ip_addresses")
-        for instance in instances:
-            if not (instance.primary_ip4 and instance.primary_ip4.dns_name):
-                continue
-            serializer = serializers_dict[model](instance)
-            host_name = serializer.data["name"]
-            netbox_host_names.add(host_name)
-
-            if host_name in awx_hosts:
-                # Host exists in AWX, check if it needs updating
-                awx_host = awx_hosts[host_name]
-                awx_host_serializer = AWXHostSerializer(data=awx_host)
-                if awx_host_serializer.is_valid():
-                    awx_host_data = awx_host_serializer.validated_data
-                    if awx_host_data != serializer.data:
-                        inventory.update_host(awx_host["id"], serializer.data)
-                        logger.info(f"Updated host {host_name} in AWX.")
-                    host_id = awx_host["id"]
+                    # Group does not exist in AWX, create it
+                    awx_group = inventory.create_group(serializer.data)
+                    if awx_group:
+                        logger.info(f"Created group {group_name} in AWX.")
+                        # Update local cache
+                        awx_groups[group_name] = awx_group
+                    else:
+                        logger.error(
+                            f"Failed to create group {group_name} in AWX."
+                        )
+
+        awx_group_names = set(awx_groups.keys())
+        # Delete groups in AWX that are not in NetBox
+        groups_to_delete = awx_group_names - netbox_group_names
+        for group_name in groups_to_delete:
+            inventory.delete_group(group_name)
+            logger.info(f"Deleted group {group_name} from AWX as it no longer exists in NetBox.")
+
+        # Synchronize hosts
+        host_models = [Device, VirtualMachine]
+        netbox_host_names = set()
+        for model in host_models:
+            instances = model.objects.select_related(
+                "primary_ip4"
+            ).prefetch_related("interfaces__ip_addresses")
+            for instance in instances:
+                if not (instance.primary_ip4 and instance.primary_ip4.dns_name):
+                    continue
+                serializer = serializers_dict[model](instance)
+                host_name = serializer.data["name"]
+                netbox_host_names.add(host_name)
+
+                if host_name in awx_hosts:
+                    # Host exists in AWX, check if it needs updating
+                    awx_host = awx_hosts[host_name]
+                    awx_host_serializer = AWXHostSerializer(data=awx_host)
+                    if awx_host_serializer.is_valid():
+                        awx_host_data = awx_host_serializer.validated_data
+                        if awx_host_data != serializer.data:
+                            inventory.update_host(awx_host["id"], serializer.data)
+                            logger.info(f"Updated host {host_name} in AWX.")
+                        host_id = awx_host["id"]
+                    else:
+                        logger.error(f"Invalid data for awx_host_serializer: {awx_host_serializer.errors}")
+                        continue  # Skip this host or handle the error appropriately
                 else:
-                    logger.error(f"Invalid data for awx_host_serializer: {awx_host_serializer.errors}")
-                    continue  # Skip this host or handle the error appropriately
-            else:
-                # Host does not exist in AWX, create it
-                awx_host = inventory.create_host(serializer.data)
-                if awx_host:
-                    logger.info(f"Created host {host_name} in AWX.")
-                    # Update local cache
-                    awx_hosts[host_name] = awx_host
-                    host_id = awx_host["id"]
+                    # Host does not exist in AWX, create it
+                    awx_host = inventory.create_host(serializer.data)
+                    if awx_host:
+                        logger.info(f"Created host {host_name} in AWX.")
+                        # Update local cache
+                        awx_hosts[host_name] = awx_host
+                        host_id = awx_host["id"]
+                    else:
+                        logger.error(
+                            f"Failed to create host {host_name} in AWX."
+                        )
+                        continue  # Skip to the next host or handle the error as appropriate
+
+                # Synchronize host-group associations
+                if awx_host["summary_fields"]["groups"]["count"] > len(
+                    awx_host["summary_fields"]["groups"]["results"]
+                ):
+                    current_groups = inventory.get_host_groups(host_id)
                 else:
-                    logger.error(
-                        f"Failed to create host {host_name} in AWX."
-                    )
-                    continue  # Skip to the next host or handle the error as appropriate
-
-            # Synchronize host-group associations
-            if awx_host["summary_fields"]["groups"]["count"] > len(
-                awx_host["summary_fields"]["groups"]["results"]
-            ):
-                current_groups = inventory.get_host_groups(host_id)
-            else:
-                current_groups = awx_host["summary_fields"]["groups"]["results"]
-            current_group_names = set(group["name"] for group in current_groups)
-
-            valid_group_names = set()
-            # Collect valid group names for this host
-            if hasattr(instance, 'site') and instance.site:
-                group_name = f"{group_prefixes[Site]}{instance.site.slug.replace('-', '_')}"
-                valid_group_names.add(group_name)
-            if hasattr(instance, 'role') and instance.role:
-                group_name = f"{group_prefixes[DeviceRole]}{instance.role.slug.replace('-', '_')}"
-                valid_group_names.add(group_name)
-            if isinstance(instance, Device) and instance.device_type:
-                group_name = f"{group_prefixes[DeviceType]}{instance.device_type.slug.replace('-', '_')}"
-                valid_group_names.add(group_name)
-            if hasattr(instance, 'tags'):
-                tags = instance.tags.all()
-                for tag in tags:
-                    group_name = f"{group_prefixes[Tag]}{tag.slug.replace('-', '_')}"
-                    valid_group_names.add(group_name)
+                    current_groups = awx_host["summary_fields"]["groups"]["results"]
+                current_group_names = set(group["name"] for group in current_groups)
 
-            # Associate host with missing groups
-            groups_to_associate = valid_group_names - current_group_names
-            for group_name in groups_to_associate:
-                group = awx_groups.get(group_name)
-                if group:
-                    inventory.associate_host_group(host_id, group["id"])
-                    logger.info(
-                        f"Associated host {host_name} with group {group_name}."
-                    )
-                else:
-                    logger.error(
-                        f"Group {group_name} not found in AWX when trying to associate with host {host_name}."
-                    )
-
-            # Disassociate host from groups that are no longer valid
-            groups_to_disassociate = current_group_names - valid_group_names
-            for group_name in groups_to_disassociate:
-                group = awx_groups.get(group_name)
-                if group:
-                    inventory.disassociate_host_group(host_id, group["id"])
-                    logger.info(
-                        f"Disassociated host {host_name} from group {group_name}."
-                    )
-
-    # Delete hosts in AWX that are not in NetBox
-    awx_host_names =set(awx_hosts.keys())
-    hosts_to_delete = awx_host_names - netbox_host_names
-    for host_name in hosts_to_delete:
-        inventory.delete_host(host_name)
-        logger.info(f"Deleted host {host_name} from AWX as it no longer exists in NetBox.")
-
-    job.terminate()
+                valid_group_names = set()
+                # Collect valid group names for this host
+                if hasattr(instance, 'site') and instance.site:
+                    group_name = f"{group_prefixes[Site]}{instance.site.slug.replace('-', '_')}"
+                    valid_group_names.add(group_name)
+                if hasattr(instance, 'role') and instance.role:
+                    group_name = f"{group_prefixes[DeviceRole]}{instance.role.slug.replace('-', '_')}"
+                    valid_group_names.add(group_name)
+                if isinstance(instance, Device) and instance.device_type:
+                    group_name = f"{group_prefixes[DeviceType]}{instance.device_type.slug.replace('-', '_')}"
+                    valid_group_names.add(group_name)
+                if hasattr(instance, 'tags'):
+                    tags = instance.tags.all()
+                    for tag in tags:
+                        group_name = f"{group_prefixes[Tag]}{tag.slug.replace('-', '_')}"
+                        valid_group_names.add(group_name)
+
+                # Associate host with missing groups
+                groups_to_associate = valid_group_names - current_group_names
+                for group_name in groups_to_associate:
+                    group = awx_groups.get(group_name)
+                    if group:
+                        inventory.associate_host_group(host_id, group["id"])
+                        logger.info(
+                            f"Associated host {host_name} with group {group_name}."
+                        )
+                    else:
+                        logger.error(
+                            f"Group {group_name} not found in AWX when trying to associate with host {host_name}."
+                        )
+
+                # Disassociate host from groups that are no longer valid
+                groups_to_disassociate = current_group_names - valid_group_names
+                for group_name in groups_to_disassociate:
+                    group = awx_groups.get(group_name)
+                    if group:
+                        inventory.disassociate_host_group(host_id, group["id"])
+                        logger.info(
+                            f"Disassociated host {host_name} from group {group_name}."
+                        )
+
+        # Delete hosts in AWX that are not in NetBox
+        awx_host_names =set(awx_hosts.keys())
+        hosts_to_delete = awx_host_names - netbox_host_names
+        for host_name in hosts_to_delete:
+            inventory.delete_host(host_name)
+            logger.info(f"Deleted host {host_name} from AWX as it no longer exists in NetBox.")
diff --git a/netbox_awx_plugin/templates/netbox_awx_plugin/awx.html b/netbox_awx_plugin/templates/netbox_awx_plugin/awx.html
index 59a3479f4b54ec7b05f47d995fac0fde6a0e0bb4..1ab8301e25fd0c893a0a816a41904dde3e736047 100644
--- a/netbox_awx_plugin/templates/netbox_awx_plugin/awx.html
+++ b/netbox_awx_plugin/templates/netbox_awx_plugin/awx.html
@@ -9,14 +9,12 @@
       <h5 class="card-header">NetBox AWX Plugin</h5>
-      <div class="card-body">
-        <table class="table table-hover attr-table">
-          <tr>
-            <th scope="row">Name</th>
-            <td>{{ object.name }}</td>
-          </tr>
-        </table>
-      </div>
+      <table class="table table-hover attr-table">
+        <tr>
+          <th scope="row">Name</th>
+          <td>{{ object.name }}</td>
+        </tr>
+      </table>
     </div>
     {% include 'inc/panels/custom_fields.html' %}
   </div>
diff --git a/netbox_awx_plugin/templates/netbox_awx_plugin/awxinventory.html b/netbox_awx_plugin/templates/netbox_awx_plugin/awxinventory.html
index d5635be8363d105a01cc5b71abf861b1e083d5d6..9a97935cd6ddd2e5b437fad7cefb977fe7b23495 100644
--- a/netbox_awx_plugin/templates/netbox_awx_plugin/awxinventory.html
+++ b/netbox_awx_plugin/templates/netbox_awx_plugin/awxinventory.html
@@ -5,7 +5,7 @@
 {% block extra_controls %}
   <form action="{% url 'plugins:netbox_awx_plugin:awxinventory_sync' pk=object.pk %}" method="post">
     {% csrf_token %}
-    <button type="submit" class="btn btn-sm btn-primary">
+    <button type="submit" class="btn btn-primary">
       <i class="mdi mdi-sync" aria-hidden="true"></i> {% trans "Sync" %}
     </button>
   </form>
@@ -20,14 +20,12 @@
       <h5 class="card-header">NetBox AWX Plugin</h5>
-      <div class="card-body">
-        <table class="table table-hover attr-table">
-          <tr>
-            <th scope="row">Id</th>
-            <td>{{ object.id }}</td>
-          </tr>
-        </table>
-      </div>
+      <table class="table table-hover attr-table">
+        <tr>
+          <th scope="row">Id</th>
+          <td>{{ object.id }}</td>
+        </tr>
+      </table>
     </div>
     {% include 'inc/panels/custom_fields.html' %}
   </div>
diff --git a/netbox_awx_plugin/tests/test_serializers.py b/netbox_awx_plugin/tests/test_serializers.py
index df1580cb2d132401396749b55202c03506cc7189..8ad5c22d7676621c9623c9771ac6f00e3817c25f 100644
--- a/netbox_awx_plugin/tests/test_serializers.py
+++ b/netbox_awx_plugin/tests/test_serializers.py
@@ -1,6 +1,6 @@
 import json
 from django.test import TestCase
-from dcim.models import Site, DeviceRole, DeviceType, Device, Interface, Manufacturer
+from dcim.models import Site, DeviceRole, DeviceType, Device, Interface, Manufacturer, MACAddress
 from ipam.models import Prefix, IPAddress
 from virtualization.models import VirtualMachine, VMInterface
 from extras.models import Tag
@@ -148,15 +148,18 @@ class InterfaceSerializerTest(TestCase):
         )
         device = Device.objects.create(
             name='Test Device',
-            device_role=device_role,
+            role=device_role,
             device_type=device_type,
             site=site
         )
         interface = Interface.objects.create(
             name='eth0',
-            device=device,
+            device=device
+        )
+        mac_address = MACAddress.objects.create(
             mac_address='00:11:22:33:44:55'
         )
+        interface.mac_addresses.add(mac_address)
         ip_address = IPAddress.objects.create(
             address='192.168.1.10/24',
             dns_name='test-device.example.com'
@@ -166,7 +169,12 @@ class InterfaceSerializerTest(TestCase):
         serializer = InterfaceSerializer(interface)
         expected_data = {
             "name": 'eth0',
-            "mac": '00:11:22:33:44:55',
+            "primary_mac_address": None,
+            "mac_addresses": [
+                {
+                    "mac_address": '00:11:22:33:44:55'
+                }
+            ],
             "ip_addresses": [
                 {
                     "address": '192.168.1.10/24',
@@ -199,7 +207,7 @@ class DeviceSerializerTest(TestCase):
         )
         device = Device.objects.create(
             name='Test Device',
-            device_role=device_role,
+            role=device_role,
             device_type=device_type,
             site=site,
             status=DeviceStatusChoices.STATUS_ACTIVE,
         )
         interface = Interface.objects.create(
             name='eth0',
-            device=device,
+            device=device
+        )
+        mac_address = MACAddress.objects.create(
             mac_address='00:11:22:33:44:55'
         )
+        interface.mac_addresses.add(mac_address)
         ip_address = IPAddress.objects.create(
             address='192.168.1.10/24',
             dns_name='test-device.example.com'
@@ -230,7 +241,12 @@ class DeviceSerializerTest(TestCase):
             "netbox_interfaces": [
                 {
                     "name": 'eth0',
-                    "mac": '00:11:22:33:44:55',
+                    "primary_mac_address": None,
+                    "mac_addresses": [
+                        {
+                            "mac_address": '00:11:22:33:44:55'
+                        }
+                    ],
                     "ip_addresses": [
                         {
                             "address": '192.168.1.10/24',
@@ -254,9 +270,13 @@ class VMInterfaceSerializerTest(TestCase):
         # Create a VMInterface
         vm_interface = VMInterface.objects.create(
             virtual_machine=vm,
-            name='eth0',
+            name='eth0'
+        )
+        # Create a MACAddress and assign it to the VMInterface
+        mac_address = MACAddress.objects.create(
             mac_address='00:11:22:33:44:55'
         )
+        vm_interface.mac_addresses.add(mac_address)
         # Create an IPAddress and assign it to the VMInterface
         ip_address = IPAddress.objects.create(
             address='192.168.1.20/24',
@@ -267,7 +287,12 @@ class VMInterfaceSerializerTest(TestCase):
         serializer = VMInterfaceSerializer(vm_interface)
         expected_data = {
             "name": 'eth0',
-            "mac": '00:11:22:33:44:55',
+            "primary_mac_address": None,
+            "mac_addresses": [
+                {
+                    "mac_address": '00:11:22:33:44:55'
+                }
+            ],
             "ip_addresses": [
                 {
                     "address": '192.168.1.20/24',
@@ -292,9 +317,13 @@ class VMSerializerTest(TestCase):
         # Create a VMInterface
         vm_interface = VMInterface.objects.create(
             virtual_machine=vm,
-            name='eth0',
+            name='eth0'
+        )
+        # Create a MACAddress and assign it to the VMInterface
+        mac_address = MACAddress.objects.create(
             mac_address='00:11:22:33:44:55'
         )
+        vm_interface.mac_addresses.add(mac_address)
         # Create an IPAddress and assign it to the VMInterface
         ip_address = IPAddress.objects.create(
             address='192.168.1.20/24',
@@ -320,7 +349,12 @@ class VMSerializerTest(TestCase):
             "netbox_interfaces": [
                 {
                     "name": 'eth0',
-                    "mac": '00:11:22:33:44:55',
+                    "primary_mac_address": None,
+                    "mac_addresses": [
+                        {
+                            "mac_address": '00:11:22:33:44:55'
+                        }
+                    ],
                     "ip_addresses": [
                         {
                             "address": '192.168.1.20/24',
@@ -401,7 +435,7 @@ class DeviceSerializerNoPrimaryIPTest(TestCase):
         )
         device = Device.objects.create(
             name='Test Device',
-            device_role=device_role,
+            role=device_role,
             device_type=device_type,
             site=site,
             status=DeviceStatusChoices.STATUS_ACTIVE,
         )
         interface = Interface.objects.create(
             name='eth0',
-            device=device,
+            device=device
+        )
+        mac_address = MACAddress.objects.create(
             mac_address='00:11:22:33:44:55'
         )
+        interface.mac_addresses.add(mac_address)
         ip_address = IPAddress.objects.create(
             address='192.168.1.10/24',
             dns_name='test-device.example.com'
@@ -430,7 +467,12 @@ class DeviceSerializerNoPrimaryIPTest(TestCase):
             "netbox_interfaces": [
                 {
                     "name": 'eth0',
-                    "mac": '00:11:22:33:44:55',
+                    "primary_mac_address": None,
+                    "mac_addresses": [
+                        {
+                            "mac_address": '00:11:22:33:44:55'
+                        }
+                    ],
                     "ip_addresses": [
                         {
                             "address": '192.168.1.10/24',
@@ -460,9 +502,13 @@ class VMSerializerNoPrimaryIPTest(TestCase):
         # Create a VMInterface
         vm_interface = VMInterface.objects.create(
             virtual_machine=vm,
-            name='eth0',
+            name='eth0'
+        )
+        # Create a MACAddress and assign it to the VMInterface
+        mac_address = MACAddress.objects.create(
             mac_address='00:11:22:33:44:55'
         )
+        vm_interface.mac_addresses.add(mac_address)
         # Create an IPAddress and assign it to the VMInterface
         ip_address = IPAddress.objects.create(
             address='192.168.1.20/24',
@@ -486,7 +532,12 @@ class VMSerializerNoPrimaryIPTest(TestCase):
             "netbox_interfaces": [
                 {
                     "name": 'eth0',
-                    "mac": '00:11:22:33:44:55',
+                    "primary_mac_address": None,
+                    "mac_addresses": [
+                        {
+                            "mac_address": '00:11:22:33:44:55'
+                        }
+                    ],
                     "ip_addresses": [
                         {
                             "address": '192.168.1.20/24',
diff --git a/netbox_awx_plugin/tests/test_signals.py b/netbox_awx_plugin/tests/test_signals.py
index b99a41edf6acdbd710b70cfed43ab9d9fd0a4040..f573025d2e69cf6a860f4dfcf43f9516067e0b67 100644
--- a/netbox_awx_plugin/tests/test_signals.py
+++ b/netbox_awx_plugin/tests/test_signals.py
@@ -93,7 +93,7 @@ class SignalsTestCase(TestCase):
         # Create a Device without primary IP
         device = Device.objects.create(
             name='Test Device',
-            device_role=device_role,
+            role=device_role,
             device_type=device_type,
             site=site,
             status='active',
@@ -129,7 +129,7 @@ class SignalsTestCase(TestCase):
         # Create a Device without primary IP and DNS name
         device = Device.objects.create(
             name='Test Device',
-            device_role=device_role,
+            role=device_role,
             device_type=device_type,
             site=site,
             status='active',
@@ -150,11 +150,16 @@ class SignalsTestCase(TestCase):
         site = Site.objects.create(name='Test Site', slug='test-site', status='active')
         device = Device.objects.create(
             name='Test Device',
-            device_role=device_role,
+            role=device_role,
             device_type=device_type,
             site=site,
             status='active',
         )
+        ip_address = IPAddress.objects.create(
+            address='192.0.2.1/24',
+            dns_name='test-device.example.com'
+        )
+        device.primary_ip4 = ip_address
 
         # Reset the mock after creating related objects
         mock_enqueue_task.reset_mock()
@@ -264,7 +269,7 @@ class SignalsTestCase(TestCase):
         site = Site.objects.create(name='Test Site', slug='test-site', status='active')
         device = Device.objects.create(
             name='Test Device',
-            device_role=device_role,
+            role=device_role,
             device_type=device_type,
             site=site,
             status='active',
         )
@@ -293,6 +298,11 @@ class SignalsTestCase(TestCase):
         # Create a Tag and assign it to a VirtualMachine
         tag = Tag.objects.create(name='Test Tag', slug='test-tag')
         vm = VirtualMachine.objects.create(name='Test VM', status='active')
+        ip_address = IPAddress.objects.create(
+            address='192.0.2.1/24',
+            dns_name='test-device.example.com'
+        )
+        vm.primary_ip4 = ip_address
 
         # Reset the mock after creating related objects
         mock_enqueue_task.reset_mock()
diff --git a/netbox_awx_plugin/tests/test_synchronization.py b/netbox_awx_plugin/tests/test_synchronization.py
index 8ef3b18326a875bc84dd80ed1da7162bc6ee26e8..1022b0b217bd084cc39640cae6fba57ddbb2e806 100644
--- a/netbox_awx_plugin/tests/test_synchronization.py
+++ b/netbox_awx_plugin/tests/test_synchronization.py
@@ -12,12 +12,14 @@ from netbox_awx_plugin.synchronization import (
     sync_group,
     delete_host,
     delete_group,
-    sync_all,
     sync_host_group_association,
     disassociate_removed_groups,
+    FullInventorySyncJob
 )
 
-from django.contrib.contenttypes.models import ContentType
+import logging
+
+logger = logging.getLogger(__name__)
 
 
 class SynchronizationTestCase(TestCase):
@@ -47,7 +49,7 @@ class SynchronizationTestCase(TestCase):
         self.site = Site.objects.create(name='Test Site', slug='test-site', status='active')
         self.device = Device.objects.create(
             name='Test Device',
-            device_role=self.device_role,
+            role=self.device_role,
             device_type=self.device_type,
             site=self.site,
             status='active',
@@ -166,7 +168,7 @@ class SynchronizationTestCase(TestCase):
         }
         sync_host(self.awx_inventory, Device, self.device)
         # Check that the host is associated with the tag group
-        mock_associate_host_group.assert_any_call(1, 3) 
+        mock_associate_host_group.assert_any_call(1, 3)
 
     @patch('netbox_awx_plugin.models.AWXInventory.disassociate_host_group')
     @patch('netbox_awx_plugin.models.AWXInventory.get_host')
@@ -301,9 +303,8 @@ class SynchronizationTestCase(TestCase):
         mock_get_group.return_value = mock_create_group.return_value
         mock_get_host_groups.return_value = []
 
-        job = Mock()
-        job.object = self.awx_inventory
-        sync_all(job)
+        job = FullInventorySyncJob(Mock())
+        job.run(inventory=self.awx_inventory)
 
         # Ensure that create_group and create_host are called
         mock_create_group.assert_called()
diff --git a/netbox_awx_plugin/tests/test_views.py b/netbox_awx_plugin/tests/test_views.py
index 135d33755aa3d863e0a60ec0017f832ed9b2fe9e..6e900587cf943809267ba26d52acbb29c801ef1b 100644
--- a/netbox_awx_plugin/tests/test_views.py
+++ b/netbox_awx_plugin/tests/test_views.py
@@ -3,7 +3,7 @@ from django.test import TestCase, Client
 from django.urls import reverse
 
 from netbox_awx_plugin.models import AWX, AWXInventory
-from django.contrib.auth.models import User
+from users.models import User
 from unittest.mock import patch
 
 
@@ -82,4 +82,4 @@ class ViewsTestCase(TestCase):
         self.assertEqual(response.status_code, 200)
         messages = list(response.context['messages'])
         self.assertTrue(any("Queued job" in str(message) for message in messages))
-        mock_enqueue.assert_called_once()
\ No newline at end of file
+        mock_enqueue.assert_called_once()
diff --git a/netbox_awx_plugin/urls.py b/netbox_awx_plugin/urls.py
index 9dcc9eb1ffacaf5fc32cc3c2c14ed25f9839b91c..2cc8270709ceec2676593688053a85b1e2198203 100644
--- a/netbox_awx_plugin/urls.py
+++ b/netbox_awx_plugin/urls.py
@@ -3,6 +3,7 @@ from netbox.views.generic import ObjectChangeLogView
 
 from . import models, views
 
+app_name = 'netbox_awx_plugin'
 
 urlpatterns = (
     path("awxs/", views.AWXListView.as_view(), name="awx_list"),
diff --git a/netbox_awx_plugin/views.py b/netbox_awx_plugin/views.py
index 4e9c2f2200d0d9152ba430c54f5a549c5b906275..a72be29e8d07d1efe251b60fcd13b37d59f7d09d 100644
--- a/netbox_awx_plugin/views.py
+++ b/netbox_awx_plugin/views.py
@@ -6,7 +6,7 @@ from .api.serializers import AWXSerializer, AWXInventorySerializer
 from django.shortcuts import redirect, render
 from core.models import Job
 from .models import AWXInventory
-from .synchronization import sync_all
+from .synchronization import FullInventorySyncJob
 import logging
 from django.contrib import messages
 
@@ -58,7 +58,7 @@ class AWXInventorySyncView(generic.ObjectView):
     def post(self, request, pk, *args, **kwargs):
         logger.info("Sync inventory")
         inventory = AWXInventory.objects.get(id=pk)
-        job = Job.enqueue(sync_all, instance=inventory, name="Full sync of inventory")
+        job = FullInventorySyncJob.enqueue(inventory=inventory)
         messages.success(request, f"Queued job #{job.pk} to sync inventory {inventory}")
 
         return render(
diff --git a/pyproject.toml b/pyproject.toml
index b3f88cfa5a406875502886b0c0fe5ac086269390..b8cf072571cf26226cbbc0abb310deb984a26a1a 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -19,13 +19,12 @@ classifiers=[
     'Intended Audience :: Developers',
     'Natural Language :: English',
     "Programming Language :: Python :: 3 :: Only",
-    'Programming Language :: Python :: 3.9',
     'Programming Language :: Python :: 3.10',
     'Programming Language :: Python :: 3.11',
     'Programming Language :: Python :: 3.12',
 ]
 
-requires-python = ">=3.8.1"
+requires-python = ">=3.10.0"
 
 [project.optional-dependencies]
 test = [
@@ -44,7 +43,7 @@ Tracker = "https://github.com/aharrisson/netbox-awx-plugin/issues"
 
 [tool.black]
 line-length = 120
-target_version = ['py39', 'py310', 'py311', 'py312']
+target_version = ['py310', 'py311', 'py312']
 
 [tool.setuptools.package-data]
 netbox_awx_plugin = ["templates/**"]