diff --git a/CHANGELOG.rst b/CHANGELOG.rst index ca70a27d2..354485e4f 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -5,6 +5,44 @@ Dell EMC OpenManage Ansible Modules Release Notes .. contents:: Topics +v5.2.0 +====== + +Release Summary +--------------- + +Support to configure console preferences on OpenManage Enterprise. + +Minor Changes +------------- + +- idrac_redfish_storage_controller - This module is enhanced to support the following settings with check mode and idempotency - UnassignSpare, EnableControllerEncryption, BlinkTarget, UnBlinkTarget, ConvertToRAID, ConvertToNonRAID, ChangePDStateToOnline, ChangePDStateToOffline. +- ome_diagnostics - The module is enhanced to support check mode and idempotency. (https://github.com/dell/dellemc-openmanage-ansible-modules/issues/345) +- ome_diagnostics - This module is enhanced to extract log from lead chassis. (https://github.com/dell/dellemc-openmanage-ansible-modules/issues/310) +- ome_profile - The module is enhanced to support check mode and idempotency. +- ome_profile - The module is enhanced to support modifying a profile based on the attribute names instead of the ID. +- ome_template - The module is enhanced to support check mode and idempotency. (https://github.com/dell/dellemc-openmanage-ansible-modules/issues/255) +- ome_template - The module is enhanced to support modifying a template based on the attribute names instead of the ID. (https://github.com/dell/dellemc-openmanage-ansible-modules/issues/358) + +Known Issues +------------ + +- idrac_user - Issue(192043) The module may error out with the message ``unable to perform the import or export operation because there are pending attribute changes or a configuration job is in progress``. Wait for the job to complete and run the task again. +- ome_application_alerts_smtp - Issue(212310) - The module does not provide a proper error message if the destination_address is more than 255 characters. +- ome_application_alerts_syslog - Issue(215374) - The module does not provide a proper error message if the destination_address is more than 255 characters. +- ome_application_console_preferences - Issue(224690) - The module does not display a proper error message when an unsupported value is provided for the parameters report_row_limit, email_sender_settings, and metric_collection_settings, and the value is applied on OpenManage Enterprise. +- ome_device_local_access_configuration - Issue(215035) - The module reports ``Successfully updated the local access setting`` if an unsupported value is provided for the parameter timeout_limit. However, this value is not actually applied on OpenManage Enterprise Modular. +- ome_device_local_access_configuration - Issue(217865) - The module does not display a proper error message if an unsupported value is provided for the user_defined and lcd_language parameters. +- ome_device_network_services - Issue(212681) - The module does not provide a proper error message if unsupported values are provided for the parameters- port_number, community_name, max_sessions, max_auth_retries, and idle_timeout. 
+- ome_device_power_settings - Issue(212679) - The module displays the following message if the value provided for the parameter ``power_cap`` is not within the supported range of 0 to 32767, ``Unable to complete the request because PowerCap does not exist or is not applicable for the resource URI.`` +- ome_device_quick_deploy - Issue(216352) - The module does not display a proper error message if an unsupported value is provided for the ipv6_prefix_length and vlan_id parameters. +- ome_smart_fabric_uplink - Issue(186024) - The module does not allow the creation of multiple uplinks of the same name even though it is supported by OpenManage Enterprise Modular. If an uplink is created using the same name as an existing uplink, the existing uplink is modified. + +New Modules +----------- + +- dellemc.openmanage.ome_application_console_preferences - Configures console preferences on OpenManage Enterprise. + v5.1.0 ====== diff --git a/README.md b/README.md index be609ad77..5c4c77248 100644 --- a/README.md +++ b/README.md @@ -7,7 +7,7 @@ OpenManage Ansible Modules simplifies and automates provisioning, deployment, an ## Supported Platforms * iDRAC 7 based Dell EMC PowerEdge Servers with firmware versions 2.63.60.62 and above. * iDRAC 8 based Dell EMC PowerEdge Servers with firmware versions 2.81.81.81 and above. - * iDRAC 9 based Dell EMC PowerEdge Servers with firmware versions 5.00.10.20 and above. + * iDRAC 9 based Dell EMC PowerEdge Servers with firmware versions 5.10.00.00 and above. * Dell EMC OpenManage Enterprise versions 3.8.2 and above. * Dell EMC OpenManage Enterprise Modular versions 1.40.00 and above. diff --git a/changelogs/changelog.yaml b/changelogs/changelog.yaml index e5970c138..5cac16c75 100644 --- a/changelogs/changelog.yaml +++ b/changelogs/changelog.yaml @@ -629,4 +629,67 @@ releases: - description: Retrieves the information of server interface profile on OpenManage Enterprise Modular. name: ome_server_interface_profile_info namespace: '' - release_date: '2022-02-24' \ No newline at end of file + release_date: '2022-02-24' + 5.2.0: + changes: + minor_changes: + - ome_template - The module is enhanced to support check mode and + idempotency. + (https://github.com/dell/dellemc-openmanage-ansible-modules/issues/255) + - ome_template - The module is enhanced to support modifying a template + based on the attribute names instead of the ID. + (https://github.com/dell/dellemc-openmanage-ansible-modules/issues/358) + - ome_profile - The module is enhanced to support check mode and + idempotency. + - ome_profile - The module is enhanced to support modifying a profile + based on the attribute names instead of the ID. + - ome_diagnostics - The module is enhanced to support check mode and + idempotency. + (https://github.com/dell/dellemc-openmanage-ansible-modules/issues/345) + - ome_diagnostics - This module is enhanced to extract log from + lead chassis. + (https://github.com/dell/dellemc-openmanage-ansible-modules/issues/310) + - idrac_redfish_storage_controller - This module is enhanced to support + the following settings with check mode and idempotency - UnassignSpare, + EnableControllerEncryption, BlinkTarget, UnBlinkTarget, ConvertToRAID, + ConvertToNonRAID, ChangePDStateToOnline, ChangePDStateToOffline. 
+ known_issues: + - ome_application_console_preferences - Issue(224690) - The module does + not display a proper error message when an unsupported value is provided + for the parameters report_row_limit, email_sender_settings, and + metric_collection_settings, and the value is applied on OpenManage Enterprise. + - ome_device_quick_deploy - Issue(216352) - The module does not display a + proper error message if an unsupported value is provided for the + ipv6_prefix_length and vlan_id parameters. + - ome_device_local_access_configuration - Issue(217865) - The module does not + display a proper error message if an unsupported value is provided for the + user_defined and lcd_language parameters. + - ome_device_local_access_configuration - Issue(215035) - The module reports + ``Successfully updated the local access setting`` if an unsupported value is + provided for the parameter timeout_limit. However, this value is not + actually applied on OpenManage Enterprise Modular. + - ome_device_network_services - Issue(212681) - The module does not provide a + proper error message if unsupported values are provided for the parameters- + port_number, community_name, max_sessions, max_auth_retries, and idle_timeout. + - ome_application_alerts_syslog - Issue(215374) - The module does not provide a + proper error message if the destination_address is more than 255 characters. + - ome_application_alerts_smtp - Issue(212310) - The module does not provide a + proper error message if the destination_address is more than 255 characters. + - idrac_user - Issue(192043) The module may error out with the message ``unable + to perform the import or export operation because there are pending attribute + changes or a configuration job is in progress``. Wait for the job to complete + and run the task again. + - ome_smart_fabric_uplink - Issue(186024) - The module does not allow the creation + of multiple uplinks of the same name even though it is supported by OpenManage + Enterprise Modular. If an uplink is created using the same name as an existing + uplink, the existing uplink is modified. + - ome_device_power_settings - Issue(212679) - The module displays the following + message if the value provided for the parameter ``power_cap`` is not within + the supported range of 0 to 32767, ``Unable to complete the request + because PowerCap does not exist or is not applicable for the resource URI.`` + release_summary: Support to configure console preferences on OpenManage Enterprise. + modules: + - description: Configures console preferences on OpenManage Enterprise. 
+ name: ome_application_console_preferences + namespace: '' + release_date: '2022-03-29' \ No newline at end of file diff --git a/docs/README.md b/docs/README.md index 141e838bb..0230fb0fd 100644 --- a/docs/README.md +++ b/docs/README.md @@ -38,6 +38,7 @@ OpenManage Ansible Modules simplifies and automates provisioning, deployment, an - [ome_application_alerts_smtp](modules/ome_application_alerts_smtp.rst) - [ome_application_alerts_syslog](modules/ome_application_alerts_syslog.rst) - [ome_application_certificate](modules/ome_application_certificate.rst) +- [ome_application_console_preferences](modules/ome_application_console_preferences.rst) - [ome_application_network_address](modules/ome_application_network_address.rst) - [ome_application_network_proxy](modules/ome_application_network_proxy.rst) - [ome_application_network_settings](modules/ome_application_network_settings.rst) diff --git a/docs/modules/idrac_redfish_storage_controller.rst b/docs/modules/idrac_redfish_storage_controller.rst index f5715dbb8..997e52336 100644 --- a/docs/modules/idrac_redfish_storage_controller.rst +++ b/docs/modules/idrac_redfish_storage_controller.rst @@ -1,8 +1,8 @@ .. _idrac_redfish_storage_controller_module: -idrac_redfish_storage_controller -- Configures the storage controller settings -============================================================================== +idrac_redfish_storage_controller -- Configures the physical disk, virtual disk, and storage controller settings +=============================================================================================================== .. contents:: :local: @@ -12,7 +12,7 @@ idrac_redfish_storage_controller -- Configures the storage controller settings Synopsis -------- -This module configures the settings of the storage controller using Redfish. +This module allows users to configure the settings of the physical disk, virtual disk, and storage controller. @@ -28,66 +28,104 @@ Parameters ---------- command (optional, str, AssignSpare) - These actions may require a system reset, depending on the controller's capabilities. + These actions may require a system reset, depending on the capabilities of the controller. - ``ResetConfig`` - Deletes all the virtual disks and unassigns all hot spares on physical disks. + ``ResetConfig`` - Deletes all the virtual disks and unassigns all hot spares on physical disks. *controller_id* is required for this operation. - ``AssignSpare`` - Assigns a physical disk as a dedicated or global hot spare for a virtual disk. + ``AssignSpare`` - Assigns a physical disk as a dedicated or global hot spare for a virtual disk. *target* is required for this operation. - ``SetControllerKey`` - Sets the key on controllers, which is used to encrypt the drives in Local key Management(LKM). + ``SetControllerKey`` - Sets the key on controllers, which is used to encrypt the drives in Local Key Management (LKM). *controller_id*, *key*, and *key_id* are required for this operation. - ``RemoveControllerKey`` - Erases the encryption key on the controller. + ``RemoveControllerKey`` - Deletes the encryption key on the controller. *controller_id* is required for this operation. - ``ReKey`` - Resets the key on the controller. + ``ReKey`` - Resets the key on the controller and it always reports as changes found when check mode is enabled. *controller_id*, *old_key*, *key_id*, and *key* are required for this operation. + ``UnassignSpare`` - Unassigns the global or dedicated hot spare. *target* is required for this operation.
- target (optional, str, None) - Fully Qualified Device Descriptor (FQDD) of the target physical drive that is assigned as a spare. + ``EnableControllerEncryption`` - Enables Local Key Management (LKM) or Secure Enterprise Key Manager (SEKM) on controllers that support encryption of the drives. *controller_id*, *key*, and *key_id* are required for this operation. - This is mandatory when *command* is ``AssignSpare``. + ``BlinkTarget`` - Blinks the target virtual drive or physical disk and it always reports as changes found when check mode is enabled. *target* or *volume_id* is required for this operation. - If *volume_id* is not specified or empty, this physical drive will be assigned as a global hot spare. + ``UnBlinkTarget`` - Unblinks the target virtual drive or physical disk and it always reports as changes found when check mode is enabled. *target* or *volume_id* is required for this operation. + + ``ConvertToRAID`` - Converts the disk from non-RAID to RAID. *target* is required for this operation. + + ``ConvertToNonRAID`` - Converts the disk from RAID to non-RAID. *target* is required for this operation. + + ``ChangePDStateToOnline`` - Sets the disk status to online. *target* is required for this operation. + + ``ChangePDStateToOffline`` - Sets the disk status to offline. *target* is required for this operation. + + + target (optional, list, None) + Fully Qualified Device Descriptor (FQDD) of the target physical drive. + + This is mandatory when *command* is ``AssignSpare``, ``UnassignSpare``, ``ChangePDStateToOnline``, ``ChangePDStateToOffline``, ``ConvertToRAID``, or ``ConvertToNonRAID``. + + If *volume_id* is not specified or empty, this physical drive will be assigned as a global hot spare when *command* is ``AssignSpare``. + + Notes: A global or dedicated hot spare can be assigned only once to a physical disk; it cannot be reassigned when *command* is ``AssignSpare``. volume_id (optional, list, None) - FQDD of the volumes to which a hot spare is assigned. + Fully Qualified Device Descriptor (FQDD) of the volume. - Applicable if *command* is ``AssignSpare``. + Applicable if *command* is ``AssignSpare``, ``BlinkTarget``, and ``UnBlinkTarget``. - To know the number of volumes to which a hot spare can be assigned, refer iDRAC Redfish API guide. + *volume_id* or *target* is required when the *command* is ``BlinkTarget`` or ``UnBlinkTarget``; if both are specified, *target* is considered. + + To know the number of volumes to which a hot spare can be assigned, refer to the iDRAC Redfish API documentation. controller_id (optional, str, None) - FQDD of the storage controller. For example- 'RAID.Slot.1-1'. + Fully Qualified Device Descriptor (FQDD) of the storage controller. For example, 'RAID.Slot.1-1'. - This option is mandatory when *command* is ``ResetConfig``, ``SetControllerKey``, ``RemoveControllerKey`` and ``ReKey``. + This option is mandatory when *command* is ``ResetConfig``, ``SetControllerKey``, ``RemoveControllerKey``, ``ReKey``, or ``EnableControllerEncryption``. key (optional, str, None) - A new security key passphrase that the encryption-capable controller uses to create the encryption key. The controller uses the encryption key to lock or unlock access to the Self Encryption Disk(SED). Only one encryption key can be created for each controller. + A new security key passphrase that the encryption-capable controller uses to create the encryption key. The controller uses the encryption key to lock or unlock access to the Self-Encrypting Drive (SED).
Only one encryption key can be created for each controller. + + This is mandatory when *command* is ``SetControllerKey``, ``ReKey``, or ``EnableControllerEncryption`` and when *mode* is ``LKM``. + + The key can be a maximum of 32 characters in length, where the expanded form of the special character is counted as a single character. - This is mandatory when *command* is ``SetControllerKey`` or ``ReKey``, and when *mode* is ``LKM``. + The key must contain at least one character from each of the character classes: uppercase, lowercase, number, and special character. key_id (optional, str, None) This is a user supplied text label associated with the passphrase. - This is mandatory when *command* is ``SetControllerKey`` or ``ReKey``, and when *mode* is ``LKM``. + This is mandatory when *command* is ``SetControllerKey``, ``ReKey``, or ``EnableControllerEncryption`` and when *mode* is ``LKM``. + + *key_id* can be a maximum of 32 characters in length and should not have any spaces. old_key (optional, str, None) - Security key passphrase used by the encryption-capable controller.. + Security key passphrase used by the encryption-capable controller. This option is mandatory when *command* is ``ReKey`` and *mode* is ``LKM``. mode (optional, str, LKM) - Encryption mode of the encryption-capable controller: 1 - Local Key Management (LKM), 2 - Security Enterprise Key Manager(SEKM). + Encryption mode of the encryption-capable controller. - This option is applicable only when *command* is ``ReKey``. + This option is applicable only when *command* is ``ReKey`` or ``EnableControllerEncryption``. ``SEKM`` requires secure enterprise key manager license on the iDRAC. + ``LKM`` to select the local key mode. + + + job_wait (optional, bool, False) + Provides the option to wait for job completion. + + + job_wait_timeout (optional, int, 120) + The maximum wait time, in seconds, for job completion before job tracking is stopped. + + This option is applicable when *job_wait* is ``True``. + baseuri (True, str, None) IP address of the target out-of-band controller. For example- :. @@ -124,8 +162,9 @@ Notes ----- .. note:: - - Run this module from a system that has direct access to DellEMC iDRAC. - - This module does not support ``check_mode``. + - Run this module from a system that has direct access to Dell EMC iDRAC. + - This module always reports as changes found when *command* is ``ReKey``, ``BlinkTarget``, or ``UnBlinkTarget``. + - This module supports ``check_mode``. @@ -159,6 +198,17 @@ Examples tags: - assign_global_hot_spare + - name: Unassign hot spare + dellemc.openmanage.idrac_redfish_storage_controller: + baseuri: "192.168.0.1:443" + username: "user_name" + password: "user_password" + ca_path: "/path/to/ca_cert.pem" + target: "Disk.Bay.0:Enclosure.Internal.0-1:RAID.Slot.1-1" + command: UnassignSpare + tags: + - un-assign-hot-spare + - name: Set controller encryption key dellemc.openmanage.idrac_redfish_storage_controller: baseuri: "192.168.0.1:443" @@ -220,6 +270,108 @@ Examples tags: - reset_config + - name: Enable controller encryption + idrac_redfish_storage_controller: + baseuri: "{{ baseuri }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + command: "EnableControllerEncryption" + controller_id: "RAID.Slot.1-1" + mode: "LKM" + key: "your_Key@123" + key_id: "your_Keyid@123" + tags: + - enable-encrypt + + - name: Blink physical disk.
+ dellemc.openmanage.idrac_redfish_storage_controller: + baseuri: "192.168.0.1:443" + username: "user_name" + password: "user_password" + ca_path: "/path/to/ca_cert.pem" + command: BlinkTarget + target: "Disk.Bay.0:Enclosure.Internal.0-1:RAID.Slot.1-1" + tags: + - blink-target + + - name: Blink virtual drive. + dellemc.openmanage.idrac_redfish_storage_controller: + baseuri: "192.168.0.1:443" + username: "user_name" + password: "user_password" + ca_path: "/path/to/ca_cert.pem" + command: BlinkTarget + volume_id: "Disk.Virtual.0:RAID.Slot.1-1" + tags: + - blink-volume + + - name: Unblink physical disk. + dellemc.openmanage.idrac_redfish_storage_controller: + baseuri: "192.168.0.1:443" + username: "user_name" + password: "user_password" + ca_path: "/path/to/ca_cert.pem" + command: UnBlinkTarget + target: "Disk.Bay.0:Enclosure.Internal.0-1:RAID.Slot.1-1" + tags: + - unblink-target + + - name: Unblink virtual drive. + dellemc.openmanage.idrac_redfish_storage_controller: + baseuri: "192.168.0.1:443" + username: "user_name" + password: "user_password" + ca_path: "/path/to/ca_cert.pem" + command: UnBlinkTarget + volume_id: "Disk.Virtual.0:RAID.Slot.1-1" + tags: + - unblink-drive + + - name: Convert physical disk to RAID + dellemc.openmanage.idrac_redfish_storage_controller: + baseuri: "192.168.0.1:443" + username: "user_name" + password: "user_password" + ca_path: "/path/to/ca_cert.pem" + command: "ConvertToRAID" + target: "Disk.Bay.0:Enclosure.Internal.0-1:RAID.Slot.1-1" + tags: + - convert-raid + + - name: Convert physical disk to non-RAID + dellemc.openmanage.idrac_redfish_storage_controller: + baseuri: "192.168.0.1:443" + username: "user_name" + password: "user_password" + ca_path: "/path/to/ca_cert.pem" + command: "ConvertToNonRAID" + target: "Disk.Bay.0:Enclosure.Internal.0-1:RAID.Slot.1-1" + tags: + - convert-non-raid + + - name: Change physical disk state to online. + dellemc.openmanage.idrac_redfish_storage_controller: + baseuri: "192.168.0.1:443" + username: "user_name" + password: "user_password" + ca_path: "/path/to/ca_cert.pem" + command: "ChangePDStateToOnline" + target: "Disk.Bay.1:Enclosure.Internal.0-1:RAID.Slot.1-1" + tags: + - pd-state-online + + - name: Change physical disk state to offline. + dellemc.openmanage.idrac_redfish_storage_controller: + baseuri: "192.168.0.1:443" + username: "user_name" + password: "user_password" + ca_path: "/path/to/ca_cert.pem" + command: "ChangePDStateToOffline" + target: "Disk.Bay.1:Enclosure.Internal.0-1:RAID.Slot.1-1" + tags: + - pd-state-offline + Return Values ------------- @@ -233,6 +385,10 @@ task (success, dict, AnsibleMapping([('id', 'JID_XXXXXXXXXXXXX'), ('uri', '/redf ID and URI resource of the job created. +status (always, dict, AnsibleMapping([('ActualRunningStartTime', '2022-02-09T04:42:41'), ('ActualRunningStopTime', '2022-02-09T04:44:00'), ('CompletionTime', '2022-02-09T04:44:00'), ('Description', 'Job Instance'), ('EndTime', 'TIME_NA'), ('Id', 'JID_444033604418'), ('JobState', 'Completed'), ('JobType', 'RealTimeNoRebootConfiguration'), ('Message', 'Job completed successfully.'), ('MessageArgs', []), ('MessageId', 'PR19'), ('Name', 'Configure: RAID.Integrated.1-1'), ('PercentComplete', 100), ('StartTime', '2022-02-09T04:42:40'), ('TargetSettingsURI', None)])) + Status of the submitted job.
+ + error_info (on http error, dict, AnsibleMapping([('error', AnsibleMapping([('@Message.ExtendedInfo', [AnsibleMapping([('Message', 'Unable to run the method because the requested HTTP method is not allowed.'), ('MessageArgs', []), ('MessageArgs@odata.count', 0), ('MessageId', 'iDRAC.1.6.SYS402'), ('RelatedProperties', []), ('RelatedProperties@odata.count', 0), ('Resolution', 'Enter a valid HTTP method and retry the operation. For information about valid methods, see the Redfish Users Guide available on the support site.'), ('Severity', 'Informational')])]), ('code', 'Base.1.0.GeneralError'), ('message', 'A general error has occurred. See ExtendedInfo for more information')]))])) Details of a http error. @@ -251,4 +407,5 @@ Authors ~~~~~~~ - Jagadeesh N V (@jagadeeshnv) +- Felix Stephen (@felixs88) diff --git a/docs/modules/ome_application_console_preferences.rst b/docs/modules/ome_application_console_preferences.rst new file mode 100644 index 000000000..b30d66523 --- /dev/null +++ b/docs/modules/ome_application_console_preferences.rst @@ -0,0 +1,314 @@ +.. _ome_application_console_preferences_module: + + +ome_application_console_preferences -- Configure console preferences on OpenManage Enterprise. +============================================================================================== + +.. contents:: + :local: + :depth: 1 + + +Synopsis +-------- + +This module allows user to configure the console preferences on OpenManage Enterprise. + + + +Requirements +------------ +The below requirements are needed on the host that executes this module. + +- python >= 3.8.6 + + + +Parameters +---------- + + report_row_limit (optional, int, None) + The maximum number of rows that you can view on OpenManage Enterprise reports. + + + device_health (optional, dict, None) + The time after which the health of the devices must be automatically monitored and updated on the OpenManage Enterprise dashboard. + + + health_check_interval (optional, int, None) + The frequency at which the device health must be recorded and data stored. + + + health_check_interval_unit (optional, str, None) + The time unit of the frequency at which the device health must be recorded and data stored. + + ``Hourly`` to set the frequency in hours. + + ``Minutes`` to set the frequency in minutes. + + + health_and_power_state_on_connection_lost (optional, str, None) + The latest recorded device health. + + ``last_known`` to display the latest recorded device health when the power connection was lost. + + ``unknown`` to display the latest recorded device health when the device status moved to unknown. + + + + discovery_settings (optional, dict, None) + The device naming to be used by the OpenManage Enterprise to identify the discovered iDRACs and other devices. + + + general_device_naming (optional, str, DNS) + Applicable to all the discovered devices other than the iDRACs. + + ``DNS`` to use the DNS name. + + ``NETBIOS`` to use the NetBIOS name. + + + server_device_naming (optional, str, IDRAC_SYSTEM_HOSTNAME) + Applicable to iDRACs only. + + ``IDRAC_HOSTNAME`` to use the iDRAC hostname. + + ``IDRAC_SYSTEM_HOSTNAME`` to use the system hostname. + + + invalid_device_hostname (optional, str, None) + The invalid hostnames separated by a comma. + + + common_mac_addresses (optional, str, None) + The common MAC addresses separated by a comma. + + + + server_initiated_discovery (optional, dict, None) + Server initiated discovery settings. + + + device_discovery_approval_policy (optional, str, None) + Discovery approval policies. 
+ + ``Automatic`` allows servers with iDRAC Firmware version 4.00.00.00, which are on the same network as the console, to be discovered automatically by the console. + + ``Manual`` for the servers to be discovered by the user manually. + + + set_trap_destination (optional, bool, None) + Trap destination settings. + + + + mx7000_onboarding_preferences (optional, str, None) + Alert-forwarding behavior on chassis when they are onboarded. + + ``all`` to receive all alert. + + ``chassis`` to receive chassis category alerts only. + + + builtin_appliance_share (optional, dict, None) + The external network share that the appliance must access to complete operations. + + + share_options (optional, str, None) + The share options. + + ``CIFS`` to select CIFS share type. + + ``HTTPS`` to select HTTPS share type. + + + cifs_options (optional, str, None) + The SMB protocol version. + + *cifs_options* is required *share_options* is ``CIFS``. + + ``V1`` to enable SMBv1. + + ``V2`` to enable SMBv2 + + + + email_sender_settings (optional, str, None) + The email address of the user who is sending an email message. + + + trap_forwarding_format (optional, str, None) + The trap forwarding format. + + ``Original`` to retain the trap data as is. + + ``Normalized`` to normalize the trap data. + + + metrics_collection_settings (optional, int, None) + The frequency of the PowerManager extension data maintenance and purging. + + + hostname (True, str, None) + OpenManage Enterprise or OpenManage Enterprise Modular IP address or hostname. + + + username (True, str, None) + OpenManage Enterprise or OpenManage Enterprise Modular username. + + + password (True, str, None) + OpenManage Enterprise or OpenManage Enterprise Modular password. + + + port (optional, int, 443) + OpenManage Enterprise or OpenManage Enterprise Modular HTTPS port. + + + validate_certs (optional, bool, True) + If ``False``, the SSL certificates will not be validated. + + Configure ``False`` only on personally controlled sites where self-signed certificates are used. + + Prior to collection version ``5.0.0``, the *validate_certs* is ``False`` by default. + + + ca_path (optional, path, None) + The Privacy Enhanced Mail (PEM) file that contains a CA certificate to be used for the validation. + + + timeout (optional, int, 30) + The socket level timeout in seconds. + + + + + +Notes +----- + +.. note:: + - This module supports ``check_mode``. + + + + +Examples +-------- + +.. code-block:: yaml+jinja + + + --- + - name: Update Console preferences with all the settings. + dellemc.openmanage.ome_application_console_preferences: + hostname: "192.168.0.1" + username: "username" + password: "password" + ca_path: "/path/to/ca_cert.pem" + report_row_limit: 123 + device_health: + health_check_interval: 1 + health_check_interval_unit: Hourly + health_and_power_state_on_connection_lost: last_known + discovery_settings: + general_device_naming: DNS + server_device_naming: IDRAC_HOSTNAME + invalid_device_hostname: "localhost" + common_mac_addresses: "::" + server_initiated_discovery: + device_discovery_approval_policy: Automatic + set_trap_destination: True + mx7000_onboarding_preferences: all + builtin_appliance_share: + share_options: CIFS + cifs_options: V1 + email_sender_settings: "admin@dell.com" + trap_forwarding_format: Normalized + metrics_collection_settings: 31 + + - name: Update Console preferences with report and device health settings. 
+ dellemc.openmanage.ome_application_console_preferences: + hostname: "192.168.0.1" + username: "username" + password: "password" + ca_path: "/path/to/ca_cert.pem" + report_row_limit: 236 + device_health: + health_check_interval: 10 + health_check_interval_unit: Hourly + health_and_power_state_on_connection_lost: last_known + + - name: Update Console preferences with invalid device health settings. + dellemc.openmanage.ome_application_console_preferences: + hostname: "192.168.0.1" + username: "username" + password: "password" + ca_path: "/path/to/ca_cert.pem" + device_health: + health_check_interval: 65 + health_check_interval_unit: Minutes + + - name: Update Console preferences with discovery and built in appliance share settings. + dellemc.openmanage.ome_application_console_preferences: + hostname: "192.168.0.1" + username: "username" + password: "password" + ca_path: "/path/to/ca_cert.pem" + discovery_settings: + general_device_naming: DNS + server_device_naming: IDRAC_SYSTEM_HOSTNAME + invalid_device_hostname: "localhost" + common_mac_addresses: "00:53:45:00:00:00" + builtin_appliance_share: + share_options: CIFS + cifs_options: V1 + + - name: Update Console preferences with server initiated discovery, mx7000 onboarding preferences, email sender, + trap forwarding format, and metrics collection settings. + dellemc.openmanage.ome_application_console_preferences: + hostname: "192.168.0.1" + username: "username" + password: "password" + ca_path: "/path/to/ca_cert.pem" + server_initiated_discovery: + device_discovery_approval_policy: Automatic + set_trap_destination: True + mx7000_onboarding_preferences: chassis + email_sender_settings: "admin@dell.com" + trap_forwarding_format: Original + metrics_collection_settings: 365 + + + +Return Values +------------- + +msg (always, str, Successfully update the console preferences.) + Overall status of the console preferences. 
+ + +console_preferences (on success, list, [AnsibleMapping([('Name', 'DEVICE_PREFERRED_NAME'), ('DefaultValue', 'SLOT_NAME'), ('Value', 'PREFER_DNS,PREFER_IDRAC_SYSTEM_HOSTNAME'), ('DataType', 'java.lang.String'), ('GroupName', 'DISCOVERY_SETTING')]), AnsibleMapping([('Name', 'INVALID_DEVICE_HOSTNAME'), ('DefaultValue', ''), ('Value', 'localhost,localhost.localdomain,not defined,pv132t,pv136t,default,dell,idrac-'), ('DataType', 'java.lang.String'), ('GroupName', 'DISCOVERY_SETTING')]), AnsibleMapping([('Name', 'COMMON_MAC_ADDRESSES'), ('DefaultValue', ''), ('Value', '00:53:45:00:00:00,33:50:6F:45:30:30,50:50:54:50:30:30,00:00:FF:FF:FF:FF,20:41:53:59:4E:FF,00:00:00:00:00:00,20:41:53:59:4e:ff,00:00:00:00:00:00'), ('DataType', 'java.lang.String'), ('GroupName', 'DISCOVERY_SETTING')]), AnsibleMapping([('Name', 'SHARE_TYPE'), ('DefaultValue', 'CIFS'), ('Value', 'CIFS'), ('DataType', 'java.lang.String'), ('GroupName', 'BUILT_IN_APPLIANCE_SHARE_SETTINGS')]), AnsibleMapping([('Name', 'TRAP_FORWARDING_SETTING'), ('DefaultValue', 'AsIs'), ('Value', 'Normalized'), ('DataType', 'java.lang.String'), ('GroupName', '')]), AnsibleMapping([('Name', 'DATA_PURGE_INTERVAL'), ('DefaultValue', '365'), ('Value', '3650000'), ('DataType', 'java.lang.Integer'), ('GroupName', '')]), AnsibleMapping([('Name', 'CONSOLE_CONNECTION_SETTING'), ('DefaultValue', 'last_known'), ('Value', 'last_known'), ('DataType', 'java.lang.String'), ('GroupName', 'CONSOLE_CONNECTION_SETTING')]), AnsibleMapping([('Name', 'MIN_PROTOCOL_VERSION'), ('DefaultValue', 'V2'), ('Value', 'V1'), ('DataType', 'java.lang.String'), ('GroupName', 'CIFS_PROTOCOL_SETTINGS')]), AnsibleMapping([('Name', 'ALERT_ACKNOWLEDGEMENT_VIEW'), ('DefaultValue', '2000'), ('Value', '2000'), ('DataType', 'java.lang.Integer'), ('GroupName', '')]), AnsibleMapping([('Name', 'AUTO_CONSOLE_UPDATE_AFTER_DOWNLOAD'), ('DefaultValue', 'false'), ('Value', 'false'), ('DataType', 'java.lang.Boolean'), ('GroupName', 'CONSOLE_UPDATE_SETTING_GROUP')]), AnsibleMapping([('Name', 'NODE_INITIATED_DISCOVERY_SET_TRAP_DESTINATION'), ('DefaultValue', 'false'), ('Value', 'false'), ('DataType', 'java.lang.Boolean'), ('GroupName', '')]), AnsibleMapping([('Name', 'REPORTS_MAX_RESULTS_LIMIT'), ('DefaultValue', '0'), ('Value', '2000000000000000000000000'), ('DataType', 'java.lang.Integer'), ('GroupName', '')]), AnsibleMapping([('Name', 'EMAIL_SENDER'), ('DefaultValue', 'omcadmin@dell.com'), ('Value', 'admin1@dell.com@dell.com@dell.com'), ('DataType', 'java.lang.String'), ('GroupName', '')]), AnsibleMapping([('Name', 'MX7000_ONBOARDING_PREF'), ('DefaultValue', 'all'), ('Value', 'test_chassis'), ('DataType', 'java.lang.String'), ('GroupName', '')]), AnsibleMapping([('Name', 'DISCOVERY_APPROVAL_POLICY'), ('DefaultValue', 'Automatic'), ('Value', 'Automatic_test'), ('DataType', 'java.lang.String'), ('GroupName', '')])]) + Details of the console preferences. + + +error_info (on HTTP error, dict, AnsibleMapping([('error', AnsibleMapping([('code', 'Base.1.0.GeneralError'), ('message', 'A general error has occurred. See ExtendedInfo for more information.'), ('@Message.ExtendedInfo', [AnsibleMapping([('MessageId', 'CGEN1006'), ('RelatedProperties', []), ('Message', 'Unable to complete the request because the resource URI does not exist or is not implemented.'), ('MessageArgs', []), ('Severity', 'Critical'), ('Resolution', 'Enter a valid URI and retry the operation.')])])]))])) + Details of the HTTP error. 
+ + + + + +Status +------ + + + + + +Authors +~~~~~~~ + +- Sachin Apagundi(@sachin-apa) +- Husniya Hameed (@husniya-hameed) + diff --git a/docs/modules/ome_application_network_address.rst b/docs/modules/ome_application_network_address.rst index 88fa5db0f..c3e3228b8 100644 --- a/docs/modules/ome_application_network_address.rst +++ b/docs/modules/ome_application_network_address.rst @@ -238,7 +238,7 @@ Notes .. note:: - The configuration changes can only be applied to one interface at a time. - The system management consoles might be unreachable for some time after the configuration changes are applied. - - This module does not support ``check_mode``. + - This module supports ``check_mode``. diff --git a/docs/modules/ome_diagnostics.rst b/docs/modules/ome_diagnostics.rst index 834ffec4b..7f4c2dede 100644 --- a/docs/modules/ome_diagnostics.rst +++ b/docs/modules/ome_diagnostics.rst @@ -131,6 +131,12 @@ Parameters *job_wait* and *job_wait_timeout* options are not applicable for *test_connection*. + lead_chassis_only (optional, bool, False) + Extract the logs from Lead chassis only. + + *lead_chassis_only* is only applicable when *log_type* is ``application`` on OpenManage Enterprise Modular. + + hostname (True, str, None) OpenManage Enterprise or OpenManage Enterprise Modular IP address or hostname. @@ -166,6 +172,13 @@ Parameters +Notes +----- + +.. note:: + - Run this module from a system that has direct access to OpenManage Enterprise. + - This module performs the test connection and device validations. It does not create a job for copying the logs in check mode and always reports as changes found. + - This module supports ``check_mode``. @@ -265,4 +278,5 @@ Authors ~~~~~~~ - Felix Stephen (@felixs88) +- Sachin Apagundi(@sachin-apa) diff --git a/docs/modules/ome_identity_pool.rst b/docs/modules/ome_identity_pool.rst index 69c02acb2..733c837c8 100644 --- a/docs/modules/ome_identity_pool.rst +++ b/docs/modules/ome_identity_pool.rst @@ -183,7 +183,7 @@ Notes .. note:: - Run this module from a system that has direct access to DellEMC OpenManage Enterprise. - - This module does not support ``check_mode``. + - This module supports ``check_mode``. diff --git a/docs/modules/ome_profile.rst b/docs/modules/ome_profile.rst index 446cd530a..e0b5f0eeb 100644 --- a/docs/modules/ome_profile.rst +++ b/docs/modules/ome_profile.rst @@ -181,6 +181,8 @@ Parameters List of attributes to be overridden when *command* is ``assign``. + Use the *Id* If the attribute Id is available. If not, use the comma separated I (DisplayName). For more details about using the *DisplayName*, see the example provided. + Options (optional, dict, None) Provides the different shut down options. @@ -235,7 +237,7 @@ Notes .. note:: - Run this module from a system that has direct access to DellEMC OpenManage Enterprise. - - This module does not support ``check_mode``. + - This module supports ``check_mode``. - ``assign`` operation on a already assigned profile will not redeploy. 
@@ -315,10 +317,15 @@ Examples Attributes: - Id: 4506 Value: "server attr 1" - IsIgnored: true + IsIgnored: false - Id: 4507 Value: "server attr 2" - IsIgnored: true + IsIgnored: false + # Enter the comma separated string as appearing in the Detailed view on GUI + # System -> Server Topology -> ServerTopology 1 Aisle Name + - DisplayName: 'System, Server Topology, ServerTopology 1 Aisle Name' + Value: Aisle 5 + IsIgnored: false - name: Delete a profile using profile name dellemc.openmanage.ome_profile: diff --git a/docs/modules/ome_server_interface_profiles.rst b/docs/modules/ome_server_interface_profiles.rst index 9a6dbd8ec..c4f9f0f40 100644 --- a/docs/modules/ome_server_interface_profiles.rst +++ b/docs/modules/ome_server_interface_profiles.rst @@ -68,7 +68,7 @@ Parameters untagged_network (optional, int, None) The maximum or minimum VLAN id of the network to be untagged. - The *untagged_network* can be retrieved using the :ref:`ome_network_vlan_info ` + The *untagged_network* can be retrieved using the :ref:`dellemc.openmanage.ome_network_vlan_info ` If *untagged_network* needs to be unset this needs to be sent as ``0`` @@ -92,7 +92,7 @@ Parameters names (True, list, None) List of network name to be marked as tagged networks - The *names* can be retrieved using the :ref:`ome_network_vlan_info ` + The *names* can be retrieved using the :ref:`dellemc.openmanage.ome_network_vlan_info ` diff --git a/docs/modules/ome_template.rst b/docs/modules/ome_template.rst index d9062a56f..5b8681c1d 100644 --- a/docs/modules/ome_template.rst +++ b/docs/modules/ome_template.rst @@ -86,7 +86,7 @@ Parameters attributes (optional, dict, None) Payload data for the template operations. All the variables in this option are added as payload for ``create``, ``modify``, ``deploy``, ``import``, and ``clone`` operations. It takes the following attributes. - Attributes: List of dictionaries of attributes (if any) to be modified in the deployment template. This is applicable when *command* is ``deploy`` and ``modify``. + Attributes: List of dictionaries of attributes (if any) to be modified in the deployment template. This is applicable when *command* is ``deploy`` and ``modify``. Use the *Id* If the attribute Id is available. If not, use the comma separated I (DisplayName). For more details about using the *DisplayName*, see the example provided. Name: Name of the template. This is mandatory when *command* is ``create``, ``import``, ``clone``, and optional when *command* is ``modify``. @@ -149,7 +149,7 @@ Notes .. note:: - Run this module from a system that has direct access to DellEMC OpenManage Enterprise. - - This module does not support ``check_mode``. + - This module supports ``check_mode``. 
@@ -191,6 +191,28 @@ Examples Value: "Test Attribute" IsIgnored: false + - name: Modify template name, description, and attribute using detailed view + dellemc.openmanage.ome_template: + hostname: "192.168.0.1" + username: "username" + password: "password" + ca_path: "/path/to/ca_cert.pem" + command: "modify" + template_id: 12 + attributes: + Name: "New Custom Template" + Description: "Custom Template Description" + Attributes: + # Enter the comma separated string as appearing in the Detailed view on GUI + # NIC -> NIC.Integrated.1-1-1 -> NIC Configuration -> Wake On LAN1 + - DisplayName: 'NIC, NIC.Integrated.1-1-1, NIC Configuration, Wake On LAN' + Value: Enabled + IsIgnored: false + # System -> LCD Configuration -> LCD 1 User Defined String for LCD + - DisplayName: 'System, LCD Configuration, LCD 1 User Defined String for LCD' + Value: LCD str by OMAM + IsIgnored: false + - name: Deploy template on multiple devices dellemc.openmanage.ome_template: hostname: "192.168.0.1" @@ -403,7 +425,7 @@ Examples attributes: Name: "Imported Template Name" Type: 2 - Content: "{{ lookup('ansible.builtin.file.', '/path/to/xmlfile') }}" + Content: "{{ lookup('ansible.builtin.file', '/path/to/xmlfile') }}" - name: "Deploy template and Operating System (OS) on multiple devices." dellemc.openmanage.ome_template: @@ -500,6 +522,10 @@ Content (success, when I(command) is C(export), str, diff --git a/playbooks/idrac/idrac_redfish_storage_controller.yml b/playbooks/idrac/idrac_redfish_storage_controller.yml index 5962d7df1..ca558d91e 100644 --- a/playbooks/idrac/idrac_redfish_storage_controller.yml +++ b/playbooks/idrac/idrac_redfish_storage_controller.yml @@ -25,15 +25,28 @@ baseuri: "{{ baseuri }}" username: "{{ username }}" password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" target: "Disk.Bay.0:Enclosure.Internal.0-1:RAID.Slot.1-1" tags: - assign_global_hot_spare + - name: Unassign hot spare + idrac_redfish_storage_controller: + baseuri: "{{ baseuri }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + target: "Disk.Bay.0:Enclosure.Internal.0-1:RAID.Slot.1-1" + command: UnassignSpare + tags: + - un-assign-hot-spare + - name: Set controller encryption key. 
idrac_redfish_storage_controller: baseuri: "{{ baseuri }}" username: "{{ username }}" password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" command: "SetControllerKey" controller_id: "RAID.Slot.1-1" key: "PassPhrase@123" @@ -46,6 +59,7 @@ baseuri: "{{ baseuri }}" username: "{{ username }}" password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" command: "ReKey" controller_id: "RAID.Slot.1-1" key: "NewPassPhrase@123" @@ -59,6 +73,7 @@ baseuri: "{{ baseuri }}" username: "{{ username }}" password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" command: "ReKey" controller_id: "RAID.Slot.1-1" mode: "SEKM" @@ -70,6 +85,7 @@ baseuri: "{{ baseuri }}" username: "{{ username }}" password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" command: "RemoveControllerKey" controller_id: "RAID.Slot.1-1" tags: @@ -80,7 +96,110 @@ baseuri: "{{ baseuri }}" username: "{{ username }}" password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" command: "ResetConfig" controller_id: "RAID.Slot.1-1" tags: - - reset_config \ No newline at end of file + - reset_config + + - name: Enable controller encryption + idrac_redfish_storage_controller: + baseuri: "{{ baseuri }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + command: "EnableControllerEncryption" + controller_id: "RAID.Slot.1-1" + mode: "LKM" + key: "your_Key@123" + key_id: "your_Keyid@123" + tags: + - enable-encrypt + + - name: Blink physical disk. + idrac_redfish_storage_controller: + baseuri: "{{ baseuri }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + command: "BlinkTarget" + target: "Disk.Bay.0:Enclosure.Internal.0-1:RAID.Slot.1-1" + tags: + - blink-target + + - name: Blink virtual drive. + idrac_redfish_storage_controller: + baseuri: "{{ baseuri }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + command: "BlinkTarget" + volume_id: "Disk.Virtual.0:RAID.Slot.1-1" + tags: + - blink-volume + + - name: Unblink physical disk. + idrac_redfish_storage_controller: + baseuri: "{{ baseuri }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + command: "UnBlinkTarget" + target: "Disk.Bay.0:Enclosure.Internal.0-1:RAID.Slot.1-1" + tags: + - unblink-target + + - name: Unblink virtual drive. + idrac_redfish_storage_controller: + baseuri: "{{ baseuri }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + command: "UnBlinkTarget" + volume_id: "Disk.Virtual.0:RAID.Slot.1-1" + tags: + - unblink-drive + + - name: Convert physical disk to RAID + idrac_redfish_storage_controller: + baseuri: "{{ baseuri }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + command: "ConvertToRAID" + target: "Disk.Bay.0:Enclosure.Internal.0-1:RAID.Slot.1-1" + tags: + - convert-raid + + - name: Convert physical disk to non-RAID + idrac_redfish_storage_controller: + baseuri: "{{ baseuri }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + command: "ConvertToNonRAID" + target: "Disk.Bay.0:Enclosure.Internal.0-1:RAID.Slot.1-1" + tags: + - convert-non-raid + + - name: Change physical disk state to online. 
+ idrac_redfish_storage_controller: + baseuri: "{{ baseuri }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + command: "ChangePDStateToOnline" + target: "Disk.Bay.1:Enclosure.Internal.0-1:RAID.Slot.1-1" + tags: + - pd-state-online + + - name: Change physical disk state to offline. + idrac_redfish_storage_controller: + baseuri: "{{ baseuri }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + command: "ChangePDStateToOffline" + target: "Disk.Bay.1:Enclosure.Internal.0-1:RAID.Slot.1-1" + tags: + - pd-state-offline diff --git a/playbooks/ome/application/ome_application_console_preferences.yml b/playbooks/ome/application/ome_application_console_preferences.yml new file mode 100644 index 000000000..b0b29ae97 --- /dev/null +++ b/playbooks/ome/application/ome_application_console_preferences.yml @@ -0,0 +1,97 @@ +--- +- hosts: ome + connection: local + name: Dell OME Application Console Preferences. + gather_facts: False + + collections: + - dellemc.openmanage + + tasks: + - name: Update Console preferences with all the settings. + ome_application_console_preferences: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + report_row_limit: 123 + device_health: + health_check_interval: 1 + health_check_interval_unit: "Hourly" + health_and_power_state_on_connection_lost: "last_known" + discovery_settings: + general_device_naming: "DNS" + server_device_naming: "IDRAC_HOSTNAME" + invalid_device_hostname: "localhost" + common_mac_addresses: "::" + server_initiated_discovery: + device_discovery_approval_policy: "Automatic" + set_trap_destination: True + mx7000_onboarding_preferences: "all" + builtin_appliance_share: + share_options: "CIFS" + cifs_options: "V1" + email_sender_settings: "admin@dell.com" + trap_forwarding_format: "Original" + metrics_collection_settings: 31 + tags: + - all_settings + + - name: Update Console preferences with report and device health settings. + ome_application_console_preferences: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + report_row_limit: 236 + device_health: + health_check_interval: 10 + health_check_interval_unit: "Hourly" + health_and_power_state_on_connection_lost: "last_known" + tags: + - valid_report_device + + - name: Update Console preferences with invalid device health settings. + ome_application_console_preferences: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + device_health: + health_check_interval: 65 + health_check_interval_unit: "Minutes" + tags: + - invalid_device + + - name: Update Console preferences with discovery and built in appliance share settings. + ome_application_console_preferences: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + discovery_settings: + general_device_naming: "DNS" + server_device_naming: "IDRAC_SYSTEM_HOSTNAME" + invalid_device_hostname: "localhost" + common_mac_addresses: "00:53:45:00:00:00" + builtin_appliance_share: + share_options: "CIFS" + cifs_options: "V1" + tags: + - valid_discovery + + - name: Update Console preferences with server initiated discovery, mx7000 onboarding preferences, email sender, trap forwarding format, and metrics collection settings.
+ ome_application_console_preferences: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + server_initiated_discovery: + device_discovery_approval_policy: "Automatic" + set_trap_destination: True + mx7000_onboarding_preferences: "chassis" + email_sender_settings: "admin@dell.com" + trap_forwarding_format: "Normalized" + metrics_collection_settings: 361 + tags: + - valid_metrics diff --git a/playbooks/ome/profile/ome_profile.yml b/playbooks/ome/profile/ome_profile.yml index 20937701f..14d43e6ac 100644 --- a/playbooks/ome/profile/ome_profile.yml +++ b/playbooks/ome/profile/ome_profile.yml @@ -86,6 +86,9 @@ - Id: 4507 Value: "server attr 2" IsIgnored: true + - DisplayName: 'System, Server Topology, ServerTopology 1 Aisle Name' + Value: Aisle 5 + IsIgnored: false tags: - modify_profile diff --git a/playbooks/ome/template/ome_template.yml b/playbooks/ome/template/ome_template.yml index 3a865fc33..58ac15ffb 100644 --- a/playbooks/ome/template/ome_template.yml +++ b/playbooks/ome/template/ome_template.yml @@ -38,6 +38,28 @@ Value: "Test Attribute" IsIgnored: false + - name: Modify template name, description, and attribute using detailed view + ome_template: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + command: "modify" + template_id: 12 + attributes: + Name: "New Custom Template" + Description: "Custom Template Description" + Attributes: + # Enter the comma separated string as appearing in the Detailed view on GUI + # NIC -> NIC.Integrated.1-1-1 -> NIC Configuration -> Wake On LAN1 + - DisplayName: 'NIC, NIC.Integrated.1-1-1, NIC Configuration, Wake On LAN' + Value: Enabled + IsIgnored: false + # System -> LCD Configuration -> LCD 1 User Defined String for LCD + - DisplayName: 'System, LCD Configuration, LCD 1 User Defined String for LCD' + Value: LCD str by OMAM + IsIgnored: false + - name: "Deploy template on multiple devices " ome_template: hostname: "192.168.0.1" diff --git a/plugins/module_utils/utils.py b/plugins/module_utils/utils.py index 6eb45f051..52e00abf1 100644 --- a/plugins/module_utils/utils.py +++ b/plugins/module_utils/utils.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- # Dell EMC OpenManage Ansible Modules -# Version 5.1.0 +# Version 5.2.0 # Copyright (C) 2022 Dell Inc. or its subsidiaries. All Rights Reserved. 
# Redistribution and use in source and binary forms, with or without modification, @@ -139,3 +139,25 @@ def apply_diff_key(src, dest, klist): dest[k] = v diff_cnt = diff_cnt + 1 return diff_cnt + + +def wait_for_job_completion(redfish_obj, uri, job_wait=True, wait_timeout=120, sleep_time=10): + max_sleep_time = wait_timeout + sleep_interval = sleep_time + if job_wait: + while max_sleep_time: + if max_sleep_time > sleep_interval: + max_sleep_time = max_sleep_time - sleep_interval + else: + sleep_interval = max_sleep_time + max_sleep_time = 0 + time.sleep(sleep_interval) + job_resp = redfish_obj.invoke_request("GET", uri) + if job_resp.json_data.get("PercentComplete") == 100: + time.sleep(10) + return job_resp, "" + else: + job_resp = redfish_obj.invoke_request("GET", uri) + time.sleep(10) + return job_resp, "" + return {}, "The job is not complete after {0} seconds.".format(wait_timeout) diff --git a/plugins/modules/idrac_redfish_storage_controller.py b/plugins/modules/idrac_redfish_storage_controller.py index ab0c0f448..bfa243077 100644 --- a/plugins/modules/idrac_redfish_storage_controller.py +++ b/plugins/modules/idrac_redfish_storage_controller.py @@ -3,7 +3,7 @@ # # Dell EMC OpenManage Ansible Modules -# Version 5.0.1 +# Version 5.2.0 # Copyright (C) 2019-2022 Dell Inc. or its subsidiaries. All Rights Reserved. # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) @@ -16,80 +16,124 @@ DOCUMENTATION = r''' --- module: idrac_redfish_storage_controller -short_description: Configures the storage controller settings +short_description: Configures the physical disk, virtual disk, and storage controller settings version_added: "2.1.0" description: - - This module configures the settings of the storage controller using Redfish. + - This module allows users to configure the settings of the physical disk, virtual disk, + and storage controller. extends_documentation_fragment: - dellemc.openmanage.redfish_auth_options options: command: description: - - These actions may require a system reset, depending on the controller's capabilities. + - These actions may require a system reset, depending on the capabilities of the controller. - C(ResetConfig) - Deletes all the virtual disks and unassigns all hot spares on physical disks. + I(controller_id) is required for this operation. - C(AssignSpare) - Assigns a physical disk as a dedicated or global hot spare for a virtual disk. - - >- - C(SetControllerKey) - Sets the key on controllers, which is used to encrypt the drives in Local key - Management(LKM). - - C(RemoveControllerKey) - Erases the encryption key on the controller. - - C(ReKey) - Resets the key on the controller. - choices: [ResetConfig, AssignSpare, SetControllerKey, RemoveControllerKey, ReKey] + I(target) is required for this operation. + - C(SetControllerKey) - Sets the key on controllers, which is used to encrypt the drives in Local + Key Management (LKM). I(controller_id), I(key), and I(key_id) are required for this operation. + - C(RemoveControllerKey) - Deletes the encryption key on the controller. + I(controller_id) is required for this operation. + - C(ReKey) - Resets the key on the controller and it always reports as changes found when check mode is enabled. + I(controller_id), I(old_key), I(key_id), and I(key) are required for this operation. + - C(UnassignSpare) - Unassigns the global or dedicated hot spare. I(target) is required for this operation.
+ - C(EnableControllerEncryption) - Enables Local Key Management (LKM) or Secure Enterprise Key Manager (SEKM) + on controllers that support encryption of the drives. I(controller_id), I(key), and I(key_id) are required + for this operation. + - C(BlinkTarget) - Blinks the target virtual drive or physical disk and it always reports as changes found + when check mode is enabled. I(target) or I(volume_id) is required for this operation. + - C(UnBlinkTarget) - Unblinks the target virtual drive or physical disk and it always reports as changes + found when check mode is enabled. I(target) or I(volume_id) is required for this operation. + - C(ConvertToRAID) - Converts the disk from non-RAID to RAID. I(target) is required for this operation. + - C(ConvertToNonRAID) - Converts the disk from RAID to non-RAID. I(target) is required for this operation. + - C(ChangePDStateToOnline) - Sets the disk status to online. I(target) is required for this operation. + - C(ChangePDStateToOffline) - Sets the disk status to offline. I(target) is required for this operation. + choices: [ResetConfig, AssignSpare, SetControllerKey, RemoveControllerKey, ReKey, UnassignSpare, + EnableControllerEncryption, BlinkTarget, UnBlinkTarget, ConvertToRAID, ConvertToNonRAID, + ChangePDStateToOnline, ChangePDStateToOffline] default: AssignSpare type: str target: description: - - Fully Qualified Device Descriptor (FQDD) of the target physical drive that is assigned as a spare. - - This is mandatory when I(command) is C(AssignSpare). - - If I(volume_id) is not specified or empty, this physical drive will be assigned as a global hot spare. - type: str + - Fully Qualified Device Descriptor (FQDD) of the target physical drive. + - This is mandatory when I(command) is C(AssignSpare), C(UnassignSpare), + C(ChangePDStateToOnline), C(ChangePDStateToOffline), C(ConvertToRAID), or C(ConvertToNonRAID). + - If I(volume_id) is not specified or empty, this physical drive will be + assigned as a global hot spare when I(command) is C(AssignSpare). + - "Notes: A global or dedicated hot spare can be assigned only once to a physical disk; + it cannot be reassigned when I(command) is C(AssignSpare)." + type: list + elements: str + aliases: [drive_id] volume_id: description: - - FQDD of the volumes to which a hot spare is assigned. - - Applicable if I(command) is C(AssignSpare). - - To know the number of volumes to which a hot spare can be assigned, refer iDRAC Redfish API guide. + - Fully Qualified Device Descriptor (FQDD) of the volume. - Applicable if I(command) is C(AssignSpare), C(BlinkTarget), and C(UnBlinkTarget). + - I(volume_id) or I(target) is required when the I(command) is C(BlinkTarget) or C(UnBlinkTarget); + if both are specified, I(target) is considered. + - To know the number of volumes to which a hot spare can be assigned, refer to the iDRAC Redfish API documentation. type: list elements: str controller_id: description: - - FQDD of the storage controller. For example- 'RAID.Slot.1-1'. - - >- - This option is mandatory when I(command) is C(ResetConfig), C(SetControllerKey), C(RemoveControllerKey) and - C(ReKey). + - Fully Qualified Device Descriptor (FQDD) of the storage controller. For example, 'RAID.Slot.1-1'. + - This option is mandatory when I(command) is C(ResetConfig), C(SetControllerKey), + C(RemoveControllerKey), C(ReKey), or C(EnableControllerEncryption). type: str key: description: - - >- - A new security key passphrase that the encryption-capable controller uses to create the encryption key.
The - controller uses the encryption key to lock or unlock access to the Self Encryption Disk(SED). Only one - encryption key can be created for each controller. - - This is mandatory when I(command) is C(SetControllerKey) or C(ReKey), and when I(mode) is C(LKM). + - A new security key passphrase that the encryption-capable controller uses to create the + encryption key. The controller uses the encryption key to lock or unlock access to the + Self-Encrypting Drive (SED). Only one encryption key can be created for each controller. + - This is mandatory when I(command) is C(SetControllerKey), C(ReKey), or C(EnableControllerEncryption) + and when I(mode) is C(LKM). + - The length of the key can be a maximum of 32 characters in length, where the expanded form of + the special character is counted as a single character. + - "The key must contain at least one character from each of the character classes: uppercase, + lowercase, number, and special character." type: str key_id: description: - This is a user supplied text label associated with the passphrase. - - This is mandatory when I(command) is C(SetControllerKey) or C(ReKey), and when I(mode) is C(LKM). + - This is mandatory when I(command) is C(SetControllerKey), C(ReKey), or C(EnableControllerEncryption) + and when I(mode) is C(LKM). + - The length of I(key_id) can be a maximum of 32 characters in length and should not have any spaces. type: str old_key: description: - - Security key passphrase used by the encryption-capable controller.. + - Security key passphrase used by the encryption-capable controller. - This option is mandatory when I(command) is C(ReKey) and I(mode) is C(LKM). type: str mode: description: - - >- - Encryption mode of the encryption-capable controller: 1 - Local Key Management (LKM), - 2 - Security Enterprise Key Manager(SEKM). - - This option is applicable only when I(command) is C(ReKey). + - Encryption mode of the encryption capable controller. + - This option is applicable only when I(command) is C(ReKey) or C(EnableControllerEncryption). - C(SEKM) requires secure enterprise key manager license on the iDRAC. + - C(LKM) to choose mode as local key mode. choices: [LKM, SEKM] default: LKM type: str + job_wait: + description: + - Provides the option if the module has to wait for the job to be completed. + type: bool + default: False + job_wait_timeout: + description: + - The maximum wait time of job completion in seconds before the job tracking is stopped. + - This option is applicable when I(job_wait) is C(True). + type: int + default: 120 requirements: - "python >= 3.8.6" -author: "Jagadeesh N V (@jagadeeshnv)" +author: + - "Jagadeesh N V (@jagadeeshnv)" + - "Felix Stephen (@felixs88)" notes: - - Run this module from a system that has direct access to DellEMC iDRAC. - - This module does not support C(check_mode). + - Run this module from a system that has direct access to Dell EMC iDRAC. + - This module always reports as changes found when C(ReKey), C(BlinkTarget), and C(UnBlinkTarget). + - This module supports C(check_mode). 
''' EXAMPLES = r''' @@ -116,6 +160,17 @@ tags: - assign_global_hot_spare +- name: Unassign hot spare + dellemc.openmanage.idrac_redfish_storage_controller: + baseuri: "192.168.0.1:443" + username: "user_name" + password: "user_password" + ca_path: "/path/to/ca_cert.pem" + target: "Disk.Bay.0:Enclosure.Internal.0-1:RAID.Slot.1-1" + command: UnassignSpare + tags: + - un-assign-hot-spare + - name: Set controller encryption key dellemc.openmanage.idrac_redfish_storage_controller: baseuri: "192.168.0.1:443" @@ -176,6 +231,108 @@ controller_id: "RAID.Slot.1-1" tags: - reset_config + +- name: Enable controller encryption + idrac_redfish_storage_controller: + baseuri: "{{ baseuri }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + command: "EnableControllerEncryption" + controller_id: "RAID.Slot.1-1" + mode: "LKM" + key: "your_Key@123" + key_id: "your_Keyid@123" + tags: + - enable-encrypt + +- name: Blink physical disk. + dellemc.openmanage.idrac_redfish_storage_controller: + baseuri: "192.168.0.1:443" + username: "user_name" + password: "user_password" + ca_path: "/path/to/ca_cert.pem" + command: BlinkTarget + target: "Disk.Bay.0:Enclosure.Internal.0-1:RAID.Slot.1-1" + tags: + - blink-target + +- name: Blink virtual drive. + dellemc.openmanage.idrac_redfish_storage_controller: + baseuri: "192.168.0.1:443" + username: "user_name" + password: "user_password" + ca_path: "/path/to/ca_cert.pem" + command: BlinkTarget + volume_id: "Disk.Virtual.0:RAID.Slot.1-1" + tags: + - blink-volume + +- name: Unblink physical disk. + dellemc.openmanage.idrac_redfish_storage_controller: + baseuri: "192.168.0.1:443" + username: "user_name" + password: "user_password" + ca_path: "/path/to/ca_cert.pem" + command: UnBlinkTarget + target: "Disk.Bay.0:Enclosure.Internal.0-1:RAID.Slot.1-1" + tags: + - unblink-target + +- name: Unblink virtual drive. + dellemc.openmanage.idrac_redfish_storage_controller: + baseuri: "192.168.0.1:443" + username: "user_name" + password: "user_password" + ca_path: "/path/to/ca_cert.pem" + command: UnBlinkTarget + volume_id: "Disk.Virtual.0:RAID.Slot.1-1" + tags: + - unblink-drive + +- name: Convert physical disk to RAID + dellemc.openmanage.idrac_redfish_storage_controller: + baseuri: "192.168.0.1:443" + username: "user_name" + password: "user_password" + ca_path: "/path/to/ca_cert.pem" + command: "ConvertToRAID" + target: "Disk.Bay.0:Enclosure.Internal.0-1:RAID.Slot.1-1" + tags: + - convert-raid + +- name: Convert physical disk to non-RAID + dellemc.openmanage.idrac_redfish_storage_controller: + baseuri: "192.168.0.1:443" + username: "user_name" + password: "user_password" + ca_path: "/path/to/ca_cert.pem" + command: "ConvertToNonRAID" + target: "Disk.Bay.0:Enclosure.Internal.0-1:RAID.Slot.1-1" + tags: + - convert-non-raid + +- name: Change physical disk state to online. + dellemc.openmanage.idrac_redfish_storage_controller: + baseuri: "192.168.0.1:443" + username: "user_name" + password: "user_password" + ca_path: "/path/to/ca_cert.pem" + command: "ChangePDStateToOnline" + target: "Disk.Bay.1:Enclosure.Internal.0-1:RAID.Slot.1-1" + tags: + - pd-state-online + +- name: Change physical disk state to offline. 
+  dellemc.openmanage.idrac_redfish_storage_controller:
+    baseuri: "192.168.0.1:443"
+    username: "user_name"
+    password: "user_password"
+    ca_path: "/path/to/ca_cert.pem"
+    command: "ChangePDStateToOffline"
+    target: "Disk.Bay.1:Enclosure.Internal.0-1:RAID.Slot.1-1"
+  tags:
+    - pd-state-offline
 '''

 RETURN = r'''
@@ -193,6 +350,27 @@
     "id": "JID_XXXXXXXXXXXXX",
     "uri": "/redfish/v1/Managers/iDRAC.Embedded.1/Jobs/JID_XXXXXXXXXXXXX"
   }
+status:
+  type: dict
+  description: Status of the submitted job.
+  returned: always
+  sample: {
+    "ActualRunningStartTime": "2022-02-09T04:42:41",
+    "ActualRunningStopTime": "2022-02-09T04:44:00",
+    "CompletionTime": "2022-02-09T04:44:00",
+    "Description": "Job Instance",
+    "EndTime": "TIME_NA",
+    "Id": "JID_444033604418",
+    "JobState": "Completed",
+    "JobType": "RealTimeNoRebootConfiguration",
+    "Message": "Job completed successfully.",
+    "MessageArgs": [],
+    "MessageId": "PR19",
+    "Name": "Configure: RAID.Integrated.1-1",
+    "PercentComplete": 100,
+    "StartTime": "2022-02-09T04:42:40",
+    "TargetSettingsURI": null
+  }
 error_info:
   type: dict
   description: Details of a http error.
@@ -221,169 +399,321 @@
 import json
 from ansible_collections.dellemc.openmanage.plugins.module_utils.redfish import Redfish, redfish_auth_params
+from ansible_collections.dellemc.openmanage.plugins.module_utils.utils import wait_for_job_completion, strip_substr_dict
 from ansible.module_utils.basic import AnsibleModule
 from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
 from ansible.module_utils.urls import ConnectionError, SSLValidationError

 SYSTEM_ID = "System.Embedded.1"
-DRIVES_URI = "/redfish/v1/Systems/{system_id}/Storage/Drives/{id}"
-CONTROLLER_URI = "/redfish/v1/Systems/{system_id}/Storage/{id}"
-VOLUME_ID_URI = "/redfish/v1/Systems/{system_id}/Storage/Volumes/{id}"
-RAID_ACTION_URI_PREFIX = "/redfish/v1/Dell/Systems/{system_id}/DellRaidService/Actions/DellRaidService.{op}"
-RAID_SERVICE_URI = "/redfish/v1/Dell/Systems/{system_id}/DellRaidService"
-DELL_CONTROLLER_URI = "/redfish/v1/Systems/{system_id}/Storage/{id}"
-
-
-def check_id_exists(module, redfish_obj, item_id, uri):
-    specified_id = module.params.get(item_id)
-    item_uri = uri.format(system_id=SYSTEM_ID, id=specified_id)
-    msg = "{0} with id {1} not found in system".format(item_id, specified_id)
+MANAGER_ID = "iDRAC.Embedded.1"
+RAID_ACTION_URI = "/redfish/v1/Systems/{system_id}/Oem/Dell/DellRaidService/Actions/DellRaidService.{action}"
+CONTROLLER_URI = "/redfish/v1/Dell/Systems/{system_id}/Storage/DellController/{controller_id}"
+VOLUME_URI = "/redfish/v1/Systems/{system_id}/Storage/{controller_id}/Volumes"
+PD_URI = "/redfish/v1/Systems/System.Embedded.1/Storage/{controller_id}/Drives/{drive_id}"
+
+JOB_SUBMISSION = "Successfully submitted the job that performs the '{0}' operation."
+JOB_COMPLETION = "Successfully performed the '{0}' operation."
+CHANGES_FOUND = "Changes found to be applied."
+NO_CHANGES_FOUND = "No changes found to be applied."
+TARGET_ERR_MSG = "The Fully Qualified Device Descriptor (FQDD) of the target {0} must be only one."
+PD_ERROR_MSG = "Unable to locate the physical disk with the ID: {0}"
+ENCRYPT_ERR_MSG = "The storage controller '{0}' does not support encryption."
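# Editor's note - illustrative sketch only, not part of this patch: the command handlers added
# below all follow the same check-mode and idempotency convention built on the CHANGES_FOUND and
# NO_CHANGES_FOUND messages defined above. A minimal form of that pattern, assuming `module` is
# the AnsibleModule instance and `changes_needed` has already been computed from the current
# controller or disk state (the helper name here is hypothetical), looks like this:
def _report_idempotency(module, changes_needed):
    """Exit early in check mode, or when the device is already in the requested state."""
    if module.check_mode and changes_needed:
        module.exit_json(msg=CHANGES_FOUND, changed=True)
    if not changes_needed:
        module.exit_json(msg=NO_CHANGES_FOUND)
    # Otherwise fall through, POST the DellRaidService action, and track the returned job.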
+ + +def check_id_exists(module, redfish_obj, key, item_id, uri): + msg = "{0} with id '{1}' not found in system".format(key, item_id) try: - resp = redfish_obj.invoke_request('GET', item_uri) + resp = redfish_obj.invoke_request("GET", uri.format(system_id=SYSTEM_ID, controller_id=item_id)) if not resp.success: module.fail_json(msg=msg) except HTTPError as err: module.fail_json(msg=msg, error_info=json.load(err)) - except (RuntimeError, URLError, SSLValidationError, ConnectionError, KeyError, ImportError, - ValueError, TypeError) as err: - module.fail_json(msg=str(err)) - - -def check_volume_array_exists(module, redfish_obj): - volume_array = module.params.get("volume_id") - msg = "Unable to locate the virtual disk with the ID: {vol}" - for vol in volume_array: - try: - resp = redfish_obj.invoke_request('GET', VOLUME_ID_URI.format(system_id=SYSTEM_ID, id=vol)) - if not resp.success: - module.fail_json(msg=msg.format(vol=vol)) - except HTTPError as err: - module.fail_json(msg=msg.format(vol=vol), error_info=json.load(err)) - except (RuntimeError, URLError, SSLValidationError, ConnectionError, KeyError, ImportError, - ValueError, TypeError) as err: - module.fail_json(msg=str(err)) - - -def check_raid_service(module, redfish_obj): - msg = "Installed version of iDRAC does not support this feature using Redfish API" + + +def ctrl_key(module, redfish_obj): + resp, job_uri, job_id, payload = None, None, None, {} + controller_id = module.params.get("controller_id") + command, mode = module.params["command"], module.params["mode"] + key, key_id = module.params.get("key"), module.params.get("key_id") + check_id_exists(module, redfish_obj, "controller_id", controller_id, CONTROLLER_URI) + ctrl_resp = redfish_obj.invoke_request("GET", CONTROLLER_URI.format(system_id=SYSTEM_ID, + controller_id=controller_id)) + security_status = ctrl_resp.json_data.get("SecurityStatus") + if security_status == "EncryptionNotCapable": + module.fail_json(msg=ENCRYPT_ERR_MSG.format(controller_id)) + ctrl_key_id = ctrl_resp.json_data.get("KeyID") + if command == "SetControllerKey": + if module.check_mode and ctrl_key_id is None: + module.exit_json(msg=CHANGES_FOUND, changed=True) + elif (module.check_mode and ctrl_key_id is not None) or (not module.check_mode and ctrl_key_id is not None): + module.exit_json(msg=NO_CHANGES_FOUND) + payload = {"TargetFQDD": controller_id, "Key": key, "Keyid": key_id} + elif command == "ReKey": + if module.check_mode: + module.exit_json(msg=CHANGES_FOUND, changed=True) + if mode == "LKM": + payload = {"TargetFQDD": controller_id, "Mode": mode, "NewKey": key, + "Keyid": key_id, "OldKey": module.params.get("old_key")} + else: + payload = {"TargetFQDD": controller_id, "Mode": mode} + elif command == "RemoveControllerKey": + if module.check_mode and ctrl_key_id is not None: + module.exit_json(msg=CHANGES_FOUND, changed=True) + elif (module.check_mode and ctrl_key_id is None) or (not module.check_mode and ctrl_key_id is None): + module.exit_json(msg=NO_CHANGES_FOUND) + payload = {"TargetFQDD": controller_id} + elif command == "EnableControllerEncryption": + if module.check_mode and not security_status == "SecurityKeyAssigned": + module.exit_json(msg=CHANGES_FOUND, changed=True) + elif (module.check_mode and security_status == "SecurityKeyAssigned") or \ + (not module.check_mode and security_status == "SecurityKeyAssigned"): + module.exit_json(msg=NO_CHANGES_FOUND) + payload = {"TargetFQDD": controller_id, "Mode": mode} + if mode == "LKM": + payload["Key"] = key + payload["Keyid"] = key_id + resp = 
redfish_obj.invoke_request("POST", RAID_ACTION_URI.format(system_id=SYSTEM_ID, action=command), + data=payload) + job_uri = resp.headers.get("Location") + job_id = job_uri.split("/")[-1] + return resp, job_uri, job_id + + +def ctrl_reset_config(module, redfish_obj): + resp, job_uri, job_id = None, None, None + controller_id = module.params.get("controller_id") + check_id_exists(module, redfish_obj, "controller_id", controller_id, CONTROLLER_URI) + member_resp = redfish_obj.invoke_request("GET", VOLUME_URI.format(system_id=SYSTEM_ID, controller_id=controller_id)) + members = member_resp.json_data.get("Members") + if module.check_mode and members: + module.exit_json(msg=CHANGES_FOUND, changed=True) + elif (module.check_mode and not members) or (not module.check_mode and not members): + module.exit_json(msg=NO_CHANGES_FOUND) + else: + resp = redfish_obj.invoke_request("POST", RAID_ACTION_URI.format(system_id=SYSTEM_ID, + action=module.params["command"]), + data={"TargetFQDD": controller_id}) + job_uri = resp.headers.get("Location") + job_id = job_uri.split("/")[-1] + return resp, job_uri, job_id + + +def hot_spare_config(module, redfish_obj): + target, command = module.params.get("target"), module.params["command"] + resp, job_uri, job_id = None, None, None + volume = module.params.get("volume_id") + controller_id = target[0].split(":")[-1] + drive_id = target[0] try: - resp = redfish_obj.invoke_request('GET', RAID_SERVICE_URI.format(system_id=SYSTEM_ID)) - if not resp.success: - module.fail_json(msg=msg) - except HTTPError as err: - module.fail_json(msg=msg, error_info=json.load(err)) - except (RuntimeError, URLError, SSLValidationError, ConnectionError, KeyError, ImportError, - ValueError, TypeError) as err: - module.fail_json(msg=str(err)) + pd_resp = redfish_obj.invoke_request("GET", PD_URI.format(controller_id=controller_id, drive_id=drive_id)) + except HTTPError: + module.fail_json(msg=PD_ERROR_MSG.format(drive_id)) + else: + hot_spare = pd_resp.json_data.get("HotspareType") + if module.check_mode and hot_spare == "None" and command == "AssignSpare" or \ + (module.check_mode and not hot_spare == "None" and command == "UnassignSpare"): + module.exit_json(msg=CHANGES_FOUND, changed=True) + elif (module.check_mode and hot_spare in ["Dedicated", "Global"] and command == "AssignSpare") or \ + (not module.check_mode and hot_spare in ["Dedicated", "Global"] and command == "AssignSpare") or \ + (module.check_mode and hot_spare == "None" and command == "UnassignSpare") or \ + (not module.check_mode and hot_spare == "None" and command == "UnassignSpare"): + module.exit_json(msg=NO_CHANGES_FOUND) + else: + payload = {"TargetFQDD": drive_id} + if volume is not None and command == "AssignSpare": + payload["VirtualDiskArray"] = volume + resp = redfish_obj.invoke_request("POST", RAID_ACTION_URI.format(system_id=SYSTEM_ID, + action=command), + data=payload) + job_uri = resp.headers.get("Location") + job_id = job_uri.split("/")[-1] + return resp, job_uri, job_id -def check_encryption_capability(module, redfish_obj): - ctrl_id = module.params["controller_id"] - uri = DELL_CONTROLLER_URI.format(system_id=SYSTEM_ID, id=ctrl_id) - response = redfish_obj.invoke_request('GET', uri) - if response.success: - data = response.json_data - if data['Oem']['Dell']['DellController']['SecurityStatus'] == "EncryptionNotCapable": - module.fail_json(msg="Encryption is not supported on the storage controller: {0}".format(ctrl_id)) +def change_pd_status(module, redfish_obj): + resp, job_uri, job_id = None, None, None + 
command, target = module.params["command"], module.params.get("target") + controller_id = target[0].split(":")[-1] + drive_id = target[0] + state = "Online" if command == "ChangePDStateToOnline" else "Offline" + try: + pd_resp = redfish_obj.invoke_request("GET", PD_URI.format(controller_id=controller_id, drive_id=drive_id)) + raid_status = pd_resp.json_data["Oem"]["Dell"]["DellPhysicalDisk"]["RaidStatus"] + except HTTPError: + module.fail_json(msg=PD_ERROR_MSG.format(drive_id)) + else: + if module.check_mode and not state == raid_status: + module.exit_json(msg=CHANGES_FOUND, changed=True) + elif (module.check_mode and state == raid_status) or (not module.check_mode and state == raid_status): + module.exit_json(msg=NO_CHANGES_FOUND) + else: + resp = redfish_obj.invoke_request("POST", RAID_ACTION_URI.format(system_id=SYSTEM_ID, + action="ChangePDState"), + data={"TargetFQDD": drive_id, "State": state}) + job_uri = resp.headers.get("Location") + job_id = job_uri.split("/")[-1] + return resp, job_uri, job_id + + +def convert_raid_status(module, redfish_obj): + resp, job_uri, job_id = None, None, None + command, target = module.params["command"], module.params.get("target") + ctrl, pd_ready_state = None, [] + try: + for ctrl in target: + controller_id = ctrl.split(":")[-1] + pd_resp = redfish_obj.invoke_request("GET", PD_URI.format(controller_id=controller_id, drive_id=ctrl)) + raid_status = pd_resp.json_data["Oem"]["Dell"]["DellPhysicalDisk"]["RaidStatus"] + pd_ready_state.append(raid_status) + except HTTPError: + module.fail_json(msg=PD_ERROR_MSG.format(ctrl)) + else: + if (command == "ConvertToRAID" and module.check_mode and 0 < pd_ready_state.count("NonRAID")) or \ + (command == "ConvertToNonRAID" and module.check_mode and 0 < pd_ready_state.count("Ready")): + module.exit_json(msg=CHANGES_FOUND, changed=True) + elif (command == "ConvertToRAID" and module.check_mode and + len(pd_ready_state) == pd_ready_state.count("Ready")) or \ + (command == "ConvertToRAID" and not module.check_mode and + len(pd_ready_state) == pd_ready_state.count("Ready")) or \ + (command == "ConvertToNonRAID" and module.check_mode and + len(pd_ready_state) == pd_ready_state.count("NonRAID")) or \ + (command == "ConvertToNonRAID" and not module.check_mode and + len(pd_ready_state) == pd_ready_state.count("NonRAID")): + module.exit_json(msg=NO_CHANGES_FOUND) + else: + resp = redfish_obj.invoke_request("POST", RAID_ACTION_URI.format(system_id=SYSTEM_ID, + action=command), + data={"PDArray": target}) + job_uri = resp.headers.get("Location") + job_id = job_uri.split("/")[-1] + return resp, job_uri, job_id + + +def target_identify_pattern(module, redfish_obj): + target, volume = module.params.get("target"), module.params.get("volume_id") + command = module.params.get("command") + payload = {"TargetFQDD": None} + + if target is not None and volume is None: + payload = {"TargetFQDD": target[0]} + elif volume is not None and target is None: + payload = {"TargetFQDD": volume[0]} + elif target is not None and volume is not None: + payload = {"TargetFQDD": target[0]} + + if module.check_mode: + module.exit_json(msg=CHANGES_FOUND, changed=True) + resp = redfish_obj.invoke_request("POST", RAID_ACTION_URI.format(system_id=SYSTEM_ID, + action=command), + data=payload) + return resp def validate_inputs(module): module_params = module.params - if module_params.get("command") == "ReKey" and module_params.get("mode") == "LKM": + command = module_params.get("command") + mode = module_params.get("mode") + if command == "ReKey" and mode == 
"LKM": key = module_params.get("key") key_id = module_params.get("key_id") old_key = module_params.get("old_key") if not all([key, key_id, old_key]): - module.fail_json(msg="All of the following: key, key_id and old_key are required for ReKey operation.") + module.fail_json(msg="All of the following: key, key_id and old_key are " + "required for '{0}' operation.".format(command)) + elif command == "EnableControllerEncryption" and mode == "LKM": + key = module_params.get("key") + key_id = module_params.get("key_id") + if not all([key, key_id]): + module.fail_json(msg="All of the following: key, key_id are " + "required for '{0}' operation.".format(command)) + elif command in ["AssignSpare", "UnassignSpare", "BlinkTarget", "UnBlinkTarget"]: + target, volume = module_params.get("target"), module_params.get("volume") + if target is not None and not 1 >= len(target): + module.fail_json(msg=TARGET_ERR_MSG.format("physical disk")) + if volume is not None and not 1 >= len(volume): + module.fail_json(msg=TARGET_ERR_MSG.format("virtual drive")) + elif command in ["ChangePDStateToOnline", "ChangePDStateToOffline"]: + target = module.params.get("target") + if target is not None and not 1 >= len(target): + module.fail_json(msg=TARGET_ERR_MSG.format("physical disk")) def main(): - payload_map = { - "controller_id": "TargetFQDD", - "volume_id": "VirtualDiskArray", - "target": "TargetFQDD", - "key": "Key", - "key_id": "Keyid", - "old_key": "OldKey", - "mode": "Mode" - } - req_map = { - 'ResetConfig': ["controller_id"], - 'AssignSpare': ["volume_id", "target"], - 'SetControllerKey': ["controller_id", "key", "key_id"], - 'RemoveControllerKey': ["controller_id"], - 'ReKey': ["controller_id", "mode"] - } specs = { - "baseuri": {"required": True, "type": 'str'}, - "username": {"required": True, "type": 'str'}, - "password": {"required": True, "type": 'str', "no_log": True}, - "validate_certs": {"type": "bool", "default": True}, - "ca_path": {"type": "path"}, - "timeout": {"type": "int", "default": 30}, - "command": {"required": False, - "choices": ['ResetConfig', 'AssignSpare', 'SetControllerKey', 'RemoveControllerKey', 'ReKey'], - "default": 'AssignSpare'}, - "controller_id": {"required": False, "type": 'str'}, - "volume_id": {"required": False, "type": 'list', "elements": 'str'}, - "target": {"required": False, "type": 'str'}, - "key": {"required": False, "type": 'str', "no_log": True}, - "key_id": {"required": False, "type": 'str'}, - "old_key": {"required": False, "type": 'str', "no_log": True}, - "mode": {"required": False, "choices": ['LKM', 'SEKM'], "default": 'LKM'} + "command": {"required": False, "default": "AssignSpare", + "choices": ["ResetConfig", "AssignSpare", "SetControllerKey", "RemoveControllerKey", + "ReKey", "UnassignSpare", "EnableControllerEncryption", "BlinkTarget", + "UnBlinkTarget", "ConvertToRAID", "ConvertToNonRAID", "ChangePDStateToOnline", + "ChangePDStateToOffline"]}, + "controller_id": {"required": False, "type": "str"}, + "volume_id": {"required": False, "type": "list", "elements": "str"}, + "target": {"required": False, "type": "list", "elements": "str", "aliases": ["drive_id"]}, + "key": {"required": False, "type": "str", "no_log": True}, + "key_id": {"required": False, "type": "str"}, + "old_key": {"required": False, "type": "str", "no_log": True}, + "mode": {"required": False, "choices": ["LKM", "SEKM"], "default": "LKM"}, + "job_wait": {"required": False, "type": "bool", "default": False}, + "job_wait_timeout": {"required": False, "type": "int", "default": 120} } 
specs.update(redfish_auth_params) module = AnsibleModule( argument_spec=specs, required_if=[ - ["command", "SetControllerKey", req_map["SetControllerKey"]], - ["command", "ReKey", req_map["ReKey"]], - ["command", "ResetConfig", req_map["ResetConfig"]], - ["command", "RemoveControllerKey", req_map["RemoveControllerKey"]], - ["command", "AssignSpare", ["target"]] + ["command", "SetControllerKey", ["controller_id", "key", "key_id"]], + ["command", "ReKey", ["controller_id", "mode"]], ["command", "ResetConfig", ["controller_id"]], + ["command", "RemoveControllerKey", ["controller_id"]], ["command", "AssignSpare", ["target"]], + ["command", "UnassignSpare", ["target"]], ["command", "EnableControllerEncryption", ["controller_id"]], + ["command", "BlinkTarget", ["target", "volume_id"], True], + ["command", "UnBlinkTarget", ["target", "volume_id"], True], ["command", "ConvertToRAID", ["target"]], + ["command", "ConvertToNonRAID", ["target"]], ["command", "ChangePDStateToOnline", ["target"]], + ["command", "ChangePDStateToOffline", ["target"]] ], - supports_check_mode=False) + supports_check_mode=True) + validate_inputs(module) try: - validate_inputs(module) + command = module.params["command"] with Redfish(module.params, req_session=True) as redfish_obj: - ctrl_fn = module.params['command'] - check_raid_service(module, redfish_obj) - if ctrl_fn == "AssignSpare": - if module.params.get("volume_id"): - check_volume_array_exists(module, redfish_obj) - check_id_exists(module, redfish_obj, "target", DRIVES_URI) + if command == "ResetConfig": + resp, job_uri, job_id = ctrl_reset_config(module, redfish_obj) + elif command == "SetControllerKey" or command == "ReKey" or \ + command == "RemoveControllerKey" or command == "EnableControllerEncryption": + resp, job_uri, job_id = ctrl_key(module, redfish_obj) + elif command == "AssignSpare" or command == "UnassignSpare": + resp, job_uri, job_id = hot_spare_config(module, redfish_obj) + elif command == "BlinkTarget" or command == "UnBlinkTarget": + resp = target_identify_pattern(module, redfish_obj) + if resp.success and resp.status_code == 200: + module.exit_json(msg=JOB_COMPLETION.format(command), changed=True) + elif command == "ConvertToRAID" or command == "ConvertToNonRAID": + resp, job_uri, job_id = convert_raid_status(module, redfish_obj) + elif command == "ChangePDStateToOnline" or command == "ChangePDStateToOffline": + resp, job_uri, job_id = change_pd_status(module, redfish_obj) + + job_wait = module.params["job_wait"] + if job_wait: + resp, msg = wait_for_job_completion(redfish_obj, job_uri, job_wait=job_wait, + wait_timeout=module.params["job_wait_timeout"]) + job_data = strip_substr_dict(resp.json_data) + if job_data["JobState"] == "Failed": + changed, failed = False, True + else: + changed, failed = True, False + module.exit_json(msg=JOB_COMPLETION.format(command), task={"id": job_id, "uri": job_uri}, + status=job_data, changed=changed, failed=failed) else: - check_id_exists(module, redfish_obj, "controller_id", CONTROLLER_URI) - msg = "Failed to submit the job to that performs the {0} operation.".format(ctrl_fn) - if ctrl_fn in ["SetControllerKey", "ReKey"]: - check_encryption_capability(module, redfish_obj) - payload_list = req_map[ctrl_fn] - payload = {} - for p in payload_list: - payload[payload_map[p]] = module.params.get(p) - if ctrl_fn == "ReKey": - if module.params["mode"] == "LKM": - payload["NewKey"] = module.params.get("key") - payload["OldKey"] = module.params.get("old_key") - payload["Keyid"] = module.params.get("key_id") - if ctrl_fn 
== "AssignSpare" and not payload.get("VirtualDiskArray"): - payload.pop("VirtualDiskArray") - built_uri = RAID_ACTION_URI_PREFIX.format(system_id=SYSTEM_ID, op=ctrl_fn) - resp = redfish_obj.invoke_request("POST", built_uri, data=payload) - if resp.success: - status = {} - status["uri"] = resp.headers.get("Location") - if status.get("uri") is not None: - status["id"] = status["uri"].split("/")[-1] - module.exit_json(changed=True, msg="Successfully submitted the job that performs the {0} operation" - .format(ctrl_fn), task=status) + resp, msg = wait_for_job_completion(redfish_obj, job_uri, job_wait=job_wait, + wait_timeout=module.params["job_wait_timeout"]) + job_data = strip_substr_dict(resp.json_data) + module.exit_json(msg=JOB_SUBMISSION.format(command), task={"id": job_id, "uri": job_uri}, + status=job_data) except HTTPError as err: module.fail_json(msg=str(err), error_info=json.load(err)) - except (RuntimeError, URLError, SSLValidationError, ConnectionError, KeyError, ImportError, - ValueError, TypeError) as err: - module.fail_json(msg=str(err)) - module.fail_json(msg=msg) + except URLError as err: + module.exit_json(msg=str(err), unreachable=True) + except (RuntimeError, SSLValidationError, ConnectionError, KeyError, + ImportError, ValueError, TypeError, AttributeError) as e: + module.fail_json(msg=str(e)) if __name__ == '__main__': diff --git a/plugins/modules/ome_application_console_preferences.py b/plugins/modules/ome_application_console_preferences.py new file mode 100644 index 000000000..67b00dc8b --- /dev/null +++ b/plugins/modules/ome_application_console_preferences.py @@ -0,0 +1,669 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- + +# +# Dell EMC OpenManage Ansible Modules +# Version 5.2.0 +# Copyright (C) 2022 Dell Inc. or its subsidiaries. All Rights Reserved. + +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) +# + + +from __future__ import (absolute_import, division, print_function) + +__metaclass__ = type + +DOCUMENTATION = r''' +--- +module: ome_application_console_preferences +short_description: Configure console preferences on OpenManage Enterprise. +description: This module allows user to configure the console preferences on OpenManage Enterprise. +version_added: "5.2.0" +extends_documentation_fragment: + - dellemc.openmanage.ome_auth_options +options: + report_row_limit: + description: The maximum number of rows that you can view on OpenManage Enterprise reports. + type: int + device_health: + description: The time after which the health of the devices must be automatically monitored and updated + on the OpenManage Enterprise dashboard. + type: dict + suboptions: + health_check_interval: + description: The frequency at which the device health must be recorded and data stored. + type: int + health_check_interval_unit: + description: + - The time unit of the frequency at which the device health must be recorded and data stored. + - C(Hourly) to set the frequency in hours. + - C(Minutes) to set the frequency in minutes. + type: str + choices: [Hourly, Minutes] + health_and_power_state_on_connection_lost: + description: + - The latest recorded device health. + - C(last_known) to display the latest recorded device health when the power connection was lost. + - C(unknown) to display the latest recorded device health when the device status moved to unknown. 
+        type: str
+        choices: [last_known, unknown]
+  discovery_settings:
+    description: The device naming to be used by the OpenManage Enterprise to identify the discovered iDRACs
+      and other devices.
+    type: dict
+    suboptions:
+      general_device_naming:
+        description:
+          - Applicable to all the discovered devices other than the iDRACs.
+          - C(DNS) to use the DNS name.
+          - C(NETBIOS) to use the NetBIOS name.
+        type: str
+        choices: [DNS, NETBIOS]
+        default: DNS
+      server_device_naming:
+        description:
+          - Applicable to iDRACs only.
+          - C(IDRAC_HOSTNAME) to use the iDRAC hostname.
+          - C(IDRAC_SYSTEM_HOSTNAME) to use the system hostname.
+        type: str
+        choices: [IDRAC_HOSTNAME, IDRAC_SYSTEM_HOSTNAME]
+        default: IDRAC_SYSTEM_HOSTNAME
+      invalid_device_hostname:
+        description: The invalid hostnames separated by a comma.
+        type: str
+      common_mac_addresses:
+        description: The common MAC addresses separated by a comma.
+        type: str
+  server_initiated_discovery:
+    description: Server initiated discovery settings.
+    type: dict
+    suboptions:
+      device_discovery_approval_policy:
+        description:
+          - Discovery approval policies.
+          - "C(Automatic) allows servers with iDRAC Firmware version 4.00.00.00, which are on the same network as the
+            console, to be discovered automatically by the console."
+          - C(Manual) for the servers to be discovered by the user manually.
+        type: str
+        choices: [Automatic, Manual]
+      set_trap_destination:
+        description: Trap destination settings.
+        type: bool
+  mx7000_onboarding_preferences:
+    description:
+      - Alert-forwarding behavior on chassis when they are onboarded.
+      - C(all) to receive all alerts.
+      - C(chassis) to receive chassis category alerts only.
+    type: str
+    choices: [all, chassis]
+  builtin_appliance_share:
+    description: The external network share that the appliance must access to complete operations.
+    type: dict
+    suboptions:
+      share_options:
+        description:
+          - The share options.
+          - C(CIFS) to select CIFS share type.
+          - C(HTTPS) to select HTTPS share type.
+        type: str
+        choices: [CIFS, HTTPS]
+      cifs_options:
+        description:
+          - The SMB protocol version.
+          - I(cifs_options) is required when I(share_options) is C(CIFS).
+          - C(V1) to enable SMBv1.
+          - C(V2) to enable SMBv2.
+        type: str
+        choices: [V1, V2]
+  email_sender_settings:
+    description: The email address of the user who is sending an email message.
+    type: str
+  trap_forwarding_format:
+    description:
+      - The trap forwarding format.
+      - C(Original) to retain the trap data as is.
+      - C(Normalized) to normalize the trap data.
+    type: str
+    choices: [Original, Normalized]
+  metrics_collection_settings:
+    description: The frequency of the PowerManager extension data maintenance and purging.
+    type: int
+requirements:
+    - "python >= 3.8.6"
+notes:
+    - This module supports C(check_mode).
+author:
+    - Sachin Apagundi (@sachin-apa)
+    - Husniya Hameed (@husniya-hameed)
+'''
+
+EXAMPLES = r'''
+---
+- name: Update Console preferences with all the settings.
+ dellemc.openmanage.ome_application_console_preferences: + hostname: "192.168.0.1" + username: "username" + password: "password" + ca_path: "/path/to/ca_cert.pem" + report_row_limit: 123 + device_health: + health_check_interval: 1 + health_check_interval_unit: Hourly + health_and_power_state_on_connection_lost: last_known + discovery_settings: + general_device_naming: DNS + server_device_naming: IDRAC_HOSTNAME + invalid_device_hostname: "localhost" + common_mac_addresses: "::" + server_initiated_discovery: + device_discovery_approval_policy: Automatic + set_trap_destination: True + mx7000_onboarding_preferences: all + builtin_appliance_share: + share_options: CIFS + cifs_options: V1 + email_sender_settings: "admin@dell.com" + trap_forwarding_format: Normalized + metrics_collection_settings: 31 + +- name: Update Console preferences with report and device health settings. + dellemc.openmanage.ome_application_console_preferences: + hostname: "192.168.0.1" + username: "username" + password: "password" + ca_path: "/path/to/ca_cert.pem" + report_row_limit: 236 + device_health: + health_check_interval: 10 + health_check_interval_unit: Hourly + health_and_power_state_on_connection_lost: last_known + +- name: Update Console preferences with invalid device health settings. + dellemc.openmanage.ome_application_console_preferences: + hostname: "192.168.0.1" + username: "username" + password: "password" + ca_path: "/path/to/ca_cert.pem" + device_health: + health_check_interval: 65 + health_check_interval_unit: Minutes + +- name: Update Console preferences with discovery and built in appliance share settings. + dellemc.openmanage.ome_application_console_preferences: + hostname: "192.168.0.1" + username: "username" + password: "password" + ca_path: "/path/to/ca_cert.pem" + discovery_settings: + general_device_naming: DNS + server_device_naming: IDRAC_SYSTEM_HOSTNAME + invalid_device_hostname: "localhost" + common_mac_addresses: "00:53:45:00:00:00" + builtin_appliance_share: + share_options: CIFS + cifs_options: V1 + +- name: Update Console preferences with server initiated discovery, mx7000 onboarding preferences, email sender, + trap forwarding format, and metrics collection settings. + dellemc.openmanage.ome_application_console_preferences: + hostname: "192.168.0.1" + username: "username" + password: "password" + ca_path: "/path/to/ca_cert.pem" + server_initiated_discovery: + device_discovery_approval_policy: Automatic + set_trap_destination: True + mx7000_onboarding_preferences: chassis + email_sender_settings: "admin@dell.com" + trap_forwarding_format: Original + metrics_collection_settings: 365 +''' + +RETURN = r''' +--- +msg: + type: str + description: Overall status of the console preferences. + returned: always + sample: "Successfully update the console preferences." +console_preferences: + type: list + description: Details of the console preferences. 
+ returned: on success + sample: + [ + { + "Name": "DEVICE_PREFERRED_NAME", + "DefaultValue": "SLOT_NAME", + "Value": "PREFER_DNS,PREFER_IDRAC_SYSTEM_HOSTNAME", + "DataType": "java.lang.String", + "GroupName": "DISCOVERY_SETTING" + }, + { + "Name": "INVALID_DEVICE_HOSTNAME", + "DefaultValue": "", + "Value": "localhost,localhost.localdomain,not defined,pv132t,pv136t,default,dell,idrac-", + "DataType": "java.lang.String", + "GroupName": "DISCOVERY_SETTING" + }, + { + "Name": "COMMON_MAC_ADDRESSES", + "DefaultValue": "", + "Value": "00:53:45:00:00:00,33:50:6F:45:30:30,50:50:54:50:30:30,00:00:FF:FF:FF:FF,20:41:53:59:4E:FF,00:00:00:00:00:00,20:41:53:59:4e:ff,00:00:00:00:00:00", + "DataType": "java.lang.String", + "GroupName": "DISCOVERY_SETTING" + }, + { + "Name": "SHARE_TYPE", + "DefaultValue": "CIFS", + "Value": "CIFS", + "DataType": "java.lang.String", + "GroupName": "BUILT_IN_APPLIANCE_SHARE_SETTINGS" + }, + { + "Name": "TRAP_FORWARDING_SETTING", + "DefaultValue": "AsIs", + "Value": "Normalized", + "DataType": "java.lang.String", + "GroupName": "" + }, + { + "Name": "DATA_PURGE_INTERVAL", + "DefaultValue": "365", + "Value": "3650000", + "DataType": "java.lang.Integer", + "GroupName": "" + }, + { + "Name": "CONSOLE_CONNECTION_SETTING", + "DefaultValue": "last_known", + "Value": "last_known", + "DataType": "java.lang.String", + "GroupName": "CONSOLE_CONNECTION_SETTING" + }, + { + "Name": "MIN_PROTOCOL_VERSION", + "DefaultValue": "V2", + "Value": "V1", + "DataType": "java.lang.String", + "GroupName": "CIFS_PROTOCOL_SETTINGS" + }, + { + "Name": "ALERT_ACKNOWLEDGEMENT_VIEW", + "DefaultValue": "2000", + "Value": "2000", + "DataType": "java.lang.Integer", + "GroupName": "" + }, + { + "Name": "AUTO_CONSOLE_UPDATE_AFTER_DOWNLOAD", + "DefaultValue": "false", + "Value": "false", + "DataType": "java.lang.Boolean", + "GroupName": "CONSOLE_UPDATE_SETTING_GROUP" + }, + { + "Name": "NODE_INITIATED_DISCOVERY_SET_TRAP_DESTINATION", + "DefaultValue": "false", + "Value": "false", + "DataType": "java.lang.Boolean", + "GroupName": "" + }, + { + "Name": "REPORTS_MAX_RESULTS_LIMIT", + "DefaultValue": "0", + "Value": "2000000000000000000000000", + "DataType": "java.lang.Integer", + "GroupName": "" + }, + { + "Name": "EMAIL_SENDER", + "DefaultValue": "omcadmin@dell.com", + "Value": "admin1@dell.com@dell.com@dell.com", + "DataType": "java.lang.String", + "GroupName": "" + }, + { + "Name": "MX7000_ONBOARDING_PREF", + "DefaultValue": "all", + "Value": "test_chassis", + "DataType": "java.lang.String", + "GroupName": "" + }, + { + "Name": "DISCOVERY_APPROVAL_POLICY", + "DefaultValue": "Automatic", + "Value": "Automatic_test", + "DataType": "java.lang.String", + "GroupName": "" + } + ] +error_info: + description: Details of the HTTP error. + returned: on HTTP error + type: dict + sample: + { + "error": { + "code": "Base.1.0.GeneralError", + "message": "A general error has occurred. See ExtendedInfo for more information.", + "@Message.ExtendedInfo": [ + { + "MessageId": "CGEN1006", + "RelatedProperties": [], + "Message": "Unable to complete the request because the resource URI does not exist or is not implemented.", + "MessageArgs": [], + "Severity": "Critical", + "Resolution": "Enter a valid URI and retry the operation." 
+ } + ] + } + } +''' + +import json +from ssl import SSLError +from ansible.module_utils.basic import AnsibleModule +from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError +from ansible.module_utils.urls import ConnectionError +from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, ome_auth_params +from ansible_collections.dellemc.openmanage.plugins.module_utils.utils import strip_substr_dict + +SUCCESS_MSG = "Successfully updated the Console Preferences settings." +SETTINGS_URL = "ApplicationService/Settings" +NO_CHANGES = "No changes found to be applied." +CHANGES_FOUND = "Changes found to be applied." +HEALTH_CHECK_UNIT_REQUIRED = "The health check unit is required when health check interval is specified." +HEALTH_CHECK_INTERVAL_REQUIRED = "The health check interval is required when health check unit is specified." +HEALTH_CHECK_INTERVAL_INVALID = "The health check interval specified is invalid for the {0}" +JOB_URL = "JobService/Jobs" +CIFS_URL = "ApplicationService/Actions/ApplicationService.UpdateShareTypeSettings" +CONSOLE_SETTINGS_VALUES = ["DATA_PURGE_INTERVAL", "EMAIL_SENDER", "TRAP_FORWARDING_SETTING", + "MX7000_ONBOARDING_PREF", "REPORTS_MAX_RESULTS_LIMIT", + "DISCOVERY_APPROVAL_POLICY", "NODE_INITIATED_DISCOVERY_SET_TRAP_DESTINATION", + "DEVICE_PREFERRED_NAME", "INVALID_DEVICE_HOSTNAME", "COMMON_MAC_ADDRESSES", + "CONSOLE_CONNECTION_SETTING", "MIN_PROTOCOL_VERSION", "SHARE_TYPE"] + + +def job_details(rest_obj): + query_param = {"$filter": "JobType/Id eq 6"} + job_resp = rest_obj.invoke_request("GET", JOB_URL, query_param=query_param) + job_data = job_resp.json_data.get('value') + tmp_list = [x["Id"] for x in job_data] + sorted_id = sorted(tmp_list) + latest_job = [val for val in job_data if val["Id"] == sorted_id[-1]] + return latest_job[0] + + +def create_job(module): + schedule = None + job_payload = None + device_health = module.params.get("device_health") + if device_health: + if device_health.get("health_check_interval_unit") == "Hourly": + schedule = "0 0 0/" + str(device_health.get("health_check_interval")) + " 1/1 * ? *" + elif device_health.get("health_check_interval_unit") == "Minutes": + schedule = "0 0/" + str(device_health.get("health_check_interval")) + " * 1/1 * ? 
*" + job_payload = {"Id": 0, + "JobName": "Global Health Task", + "JobDescription": "Global Health Task", + "Schedule": schedule, + "State": "Enabled", + "JobType": {"Id": 6, "Name": "Health_Task"}, + "Params": [{"Key": "metricType", "Value": "40, 50"}], + "Targets": [{"Id": 500, "Data": "", "TargetType": {"Id": 6000, "Name": "GROUP"}}]} + return job_payload, schedule + + +def fetch_cp_settings(rest_obj): + final_resp = rest_obj.invoke_request("GET", SETTINGS_URL) + ret_data = final_resp.json_data.get('value') + return ret_data + + +def create_payload_dict(curr_payload): + payload = {} + for pay in curr_payload: + payload[pay["Name"]] = pay + return payload + + +def create_payload(module, curr_payload): + console_setting_list = [] + updated_payload = {"ConsoleSetting": []} + payload_dict = create_payload_dict(curr_payload) + get_sid = module.params.get("server_initiated_discovery") + get_ds = module.params.get("discovery_settings") + get_mcs = module.params.get("metrics_collection_settings") + get_email = module.params.get("email_sender_settings") + get_tff = module.params.get("trap_forwarding_format") + get_mx = module.params.get("mx7000_onboarding_preferences") + get_rrl = module.params.get("report_row_limit") + get_dh = module.params.get("device_health") + get_bas = module.params.get("builtin_appliance_share") + if get_mcs: + payload1 = payload_dict["DATA_PURGE_INTERVAL"].copy() + payload1["Value"] = get_mcs + console_setting_list.append(payload1) + if get_email: + payload2 = payload_dict["EMAIL_SENDER"].copy() + payload2["Value"] = get_email + console_setting_list.append(payload2) + if get_tff: + dict1 = {"Original": "AsIs", "Normalized": "Normalized"} + payload3 = payload_dict["TRAP_FORWARDING_SETTING"].copy() + payload3["Value"] = dict1.get(get_tff) + console_setting_list.append(payload3) + if get_mx: + payload4 = payload_dict["MX7000_ONBOARDING_PREF"].copy() + payload4["Value"] = get_mx + console_setting_list.append(payload4) + if get_rrl: + payload5 = payload_dict["REPORTS_MAX_RESULTS_LIMIT"].copy() + payload5["Value"] = get_rrl + console_setting_list.append(payload5) + if get_sid: + if get_sid.get("device_discovery_approval_policy"): + payload6 = payload_dict["DISCOVERY_APPROVAL_POLICY"].copy() + payload6["Value"] = get_sid.get("device_discovery_approval_policy") + console_setting_list.append(payload6) + if get_sid.get("set_trap_destination") is not None: + payload7 = payload_dict["NODE_INITIATED_DISCOVERY_SET_TRAP_DESTINATION"].copy() + payload7["Value"] = get_sid.get("set_trap_destination") + console_setting_list.append(payload7) + if get_ds: + if get_ds.get("general_device_naming") and get_ds.get("server_device_naming"): + value = "PREFER_" + module.params["discovery_settings"]["general_device_naming"] + "," + "PREFER_" +\ + get_ds["server_device_naming"] + payload8 = payload_dict["DEVICE_PREFERRED_NAME"].copy() + payload8["Value"] = value + console_setting_list.append(payload8) + elif get_ds.get("general_device_naming"): + payload9 = payload_dict["DEVICE_PREFERRED_NAME"].copy() + payload9["Value"] = "PREFER_" + get_ds["general_device_naming"] + console_setting_list.append(payload9) + elif get_ds.get("server_device_naming"): + payload10 = payload_dict["DEVICE_PREFERRED_NAME"].copy() + payload10["Value"] = "PREFER_" + get_ds["server_device_naming"] + console_setting_list.append(payload10) + if get_ds.get("invalid_device_hostname"): + payload11 = payload_dict["INVALID_DEVICE_HOSTNAME"].copy() + payload11["Value"] = get_ds.get("invalid_device_hostname") + 
console_setting_list.append(payload11) + if get_ds.get("common_mac_addresses"): + payload12 = payload_dict["COMMON_MAC_ADDRESSES"].copy() + payload12["Value"] = get_ds.get("common_mac_addresses") + console_setting_list.append(payload12) + if get_dh and get_dh.get("health_and_power_state_on_connection_lost"): + payload13 = payload_dict["CONSOLE_CONNECTION_SETTING"].copy() + payload13["Value"] = get_dh.get("health_and_power_state_on_connection_lost") + console_setting_list.append(payload13) + if get_bas and get_bas.get("share_options") == "CIFS": + payload14 = payload_dict["MIN_PROTOCOL_VERSION"].copy() + payload14["Value"] = get_bas.get("cifs_options") + console_setting_list.append(payload14) + updated_payload["ConsoleSetting"] = console_setting_list + return updated_payload, payload_dict + + +def create_cifs_payload(module, curr_payload): + console_setting_list = [] + updated_payload = {"ConsoleSetting": []} + payload_dict = create_payload_dict(curr_payload) + get_bas = module.params.get("builtin_appliance_share") + if get_bas and get_bas.get("share_options"): + payload = payload_dict["SHARE_TYPE"].copy() + payload["Value"] = get_bas.get("share_options") + console_setting_list.append(payload) + updated_payload["ConsoleSetting"] = console_setting_list + return updated_payload + + +def update_console_preferences(module, rest_obj, payload, payload_cifs, job_payload, job, payload_dict, schedule): + cifs_resp = None + job_final_resp = None + get_bas = module.params.get("builtin_appliance_share") + device_health = module.params.get("device_health") + [payload["ConsoleSetting"].remove(i) for i in payload["ConsoleSetting"] if i["Name"] == "SHARE_TYPE"] + if device_health and device_health.get("health_check_interval_unit") and job["Schedule"] != schedule: + job_final_resp = rest_obj.invoke_request("POST", JOB_URL, data=job_payload) + if get_bas and get_bas.get("share_options") and payload_dict["SHARE_TYPE"]["Value"] != \ + get_bas.get("share_options"): + cifs_resp = rest_obj.invoke_request("POST", CIFS_URL, data=payload_cifs) + final_resp = rest_obj.invoke_request("POST", SETTINGS_URL, data=payload) + return final_resp, cifs_resp, job_final_resp + + +def _diff_payload(curr_resp, update_resp, payload_cifs, schedule, job_det): + diff = 0 + update_resp["ConsoleSetting"].extend(payload_cifs["ConsoleSetting"]) + if schedule and job_det["Schedule"] != schedule: + diff += 1 + for i in curr_resp: + for j in update_resp["ConsoleSetting"]: + if i["Name"] == j["Name"]: + if isinstance(j["Value"], bool): + j["Value"] = str(j["Value"]).lower() + if isinstance(j["Value"], int): + j["Value"] = str(j["Value"]) + if i["Value"] != j["Value"]: + diff += 1 + return diff + + +def process_check_mode(module, diff): + if not diff: + module.exit_json(msg=NO_CHANGES) + elif diff and module.check_mode: + module.exit_json(msg=CHANGES_FOUND, changed=True) + + +def _validate_params(module): + error_message = _validate_health_check_interval(module) + if error_message: + module.fail_json(msg=error_message) + + +def _validate_health_check_interval(module): + error_message = None + device_health = module.params.get("device_health") + if device_health: + hci = device_health.get("health_check_interval") + hciu = device_health.get("health_check_interval_unit") + if hci and not hciu: + error_message = HEALTH_CHECK_UNIT_REQUIRED + if hciu and not hci: + error_message = HEALTH_CHECK_INTERVAL_REQUIRED + if hciu and hci: + if hciu == "Hourly" and (hci < 1 or hci > 23): + error_message = HEALTH_CHECK_INTERVAL_INVALID.format(hciu) + if hciu 
== "Minutes" and (hci < 1 or hci > 59): + error_message = HEALTH_CHECK_INTERVAL_INVALID.format(hciu) + return error_message + + +def main(): + device_health_opt = {"health_check_interval": {"type": "int", "required": False}, + "health_check_interval_unit": {"type": "str", "required": False, + "choices": ["Hourly", "Minutes"]}, + "health_and_power_state_on_connection_lost": {"type": "str", "required": False, + "choices": ["last_known", "unknown"]} + } + discovery_settings_opt = { + "general_device_naming": {"type": "str", "required": False, "default": "DNS", + "choices": ["DNS", "NETBIOS"]}, + "server_device_naming": {"type": "str", "required": False, "default": "IDRAC_SYSTEM_HOSTNAME", + "choices": ["IDRAC_HOSTNAME", "IDRAC_SYSTEM_HOSTNAME"]}, + "invalid_device_hostname": {"type": "str", "required": False}, + "common_mac_addresses": {"type": "str", "required": False} + } + server_initiated_discovery_opt = { + "device_discovery_approval_policy": {"type": "str", "required": False, "choices": ["Automatic", "Manual"]}, + "set_trap_destination": {"type": "bool", "required": False, }, + } + builtin_appliance_share_opt = { + "share_options": {"type": "str", "required": False, + "choices": ["CIFS", "HTTPS"]}, + "cifs_options": {"type": "str", "required": False, + "choices": ["V1", "V2"] + }, + } + + specs = { + "report_row_limit": {"required": False, "type": "int"}, + "device_health": {"required": False, "type": "dict", + "options": device_health_opt + }, + "discovery_settings": {"required": False, "type": "dict", + "options": discovery_settings_opt + }, + "server_initiated_discovery": {"required": False, "type": "dict", + "options": server_initiated_discovery_opt + }, + "mx7000_onboarding_preferences": {"required": False, "type": "str", "choices": ["all", "chassis"]}, + "builtin_appliance_share": {"required": False, "type": "dict", + "options": builtin_appliance_share_opt, + "required_if": [['share_options', "CIFS", ('cifs_options',)]] + }, + "email_sender_settings": {"required": False, "type": "str"}, + "trap_forwarding_format": {"required": False, "type": "str", "choices": ["Normalized", "Original"]}, + "metrics_collection_settings": {"required": False, "type": "int"}, + } + specs.update(ome_auth_params) + module = AnsibleModule(argument_spec=specs, + required_one_of=[["report_row_limit", "device_health", "discovery_settings", + "server_initiated_discovery", "mx7000_onboarding_preferences", + "builtin_appliance_share", "email_sender_settings", + "trap_forwarding_format", "metrics_collection_settings"]], + supports_check_mode=True, ) + + try: + _validate_params(module) + with RestOME(module.params, req_session=True) as rest_obj: + job = job_details(rest_obj) + job_payload, schedule = create_job(module) + curr_resp = fetch_cp_settings(rest_obj) + payload, payload_dict = create_payload(module, curr_resp) + cifs_payload = create_cifs_payload(module, curr_resp) + diff = _diff_payload(curr_resp, payload, cifs_payload, schedule, job) + process_check_mode(module, diff) + resp, cifs_resp, job_resp = update_console_preferences(module, rest_obj, payload, cifs_payload, + job_payload, job, payload_dict, schedule) + resp_req = fetch_cp_settings(rest_obj) + cp_list = [] + resp_data = list(filter(lambda d: d['Name'] in CONSOLE_SETTINGS_VALUES, resp_req)) + for cp in resp_data: + cp_data = strip_substr_dict(cp) + cp_list.append(cp_data) + module.exit_json(msg=SUCCESS_MSG, console_preferences=cp_list) + except HTTPError as err: + module.fail_json(msg=str(err), error_info=json.load(err)) + except URLError as err: 
+ module.exit_json(msg=str(err), unreachable=True) + except (IOError, ValueError, SSLError, TypeError, ConnectionError, AttributeError, IndexError, KeyError, OSError) as err: + module.fail_json(msg=str(err), error_info=json.load(err)) + + +if __name__ == '__main__': + main() diff --git a/plugins/modules/ome_diagnostics.py b/plugins/modules/ome_diagnostics.py index 34ca56f38..9a6ca871d 100644 --- a/plugins/modules/ome_diagnostics.py +++ b/plugins/modules/ome_diagnostics.py @@ -3,7 +3,7 @@ # # Dell EMC OpenManage Ansible Modules -# Version 5.0.1 +# Version 5.2.0 # Copyright (C) 2021-2022 Dell Inc. or its subsidiaries. All Rights Reserved. # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) @@ -120,10 +120,22 @@ - Test the availability of the network share location. - I(job_wait) and I(job_wait_timeout) options are not applicable for I(test_connection). default: False + lead_chassis_only: + type: bool + description: + - Extract the logs from Lead chassis only. + - I(lead_chassis_only) is only applicable when I(log_type) is C(application) on OpenManage Enterprise Modular. + default: False requirements: - "python >= 3.8.6" author: - "Felix Stephen (@felixs88)" + - "Sachin Apagundi(@sachin-apa)" +notes: + - Run this module from a system that has direct access to OpenManage Enterprise. + - This module performs the test connection and device validations. It does not create a job for copying the + logs in check mode and always reports as changes found. + - This module supports C(check_mode). """ @@ -265,6 +277,7 @@ DEVICE_URI = "DeviceService/Devices" DOMAIN_URI = "ManagementDomainService/Domains" EXE_HISTORY_URI = "JobService/Jobs({0})/ExecutionHistories" +CHANGES_FOUND = "Changes found to be applied." def group_validation(module, rest_obj): @@ -319,8 +332,22 @@ def extract_log_operation(module, rest_obj, device_lst=None): payload_params, target_params = [], [] log_type = module.params["log_type"] if log_type == "application": - resp = rest_obj.invoke_request("GET", DEVICE_URI, query_param={"$filter": "Type eq 2000"}) - resp_data = resp.json_data["value"] + lead_only = module.params["lead_chassis_only"] + resp_data = None + if lead_only: + domain_details = rest_obj.get_all_items_with_pagination(DOMAIN_URI) + key = "Id" + ch_device_id = None + for each_domain in domain_details["value"]: + if each_domain["DomainRoleTypeValue"] in ["LEAD", "STANDALONE"]: + ch_device_id = each_domain["DeviceId"] + if ch_device_id: + resp = rest_obj.invoke_request("GET", DEVICE_URI, + query_param={"$filter": "{0} eq {1}".format(key, ch_device_id)}) + resp_data = resp.json_data["value"] + else: + resp = rest_obj.invoke_request("GET", DEVICE_URI, query_param={"$filter": "Type eq 2000"}) + resp_data = resp.json_data["value"] if resp_data: for dev in resp_data: target_params.append({"Id": dev["Id"], "Data": "", @@ -401,6 +428,7 @@ def main(): "job_wait": {"required": False, "type": "bool", "default": True}, "job_wait_timeout": {"required": False, "type": "int", "default": 60}, "test_connection": {"required": False, "type": "bool", "default": False}, + "lead_chassis_only": {"required": False, "type": "bool", "default": False}, } specs.update(ome_auth_params) module = AnsibleModule( @@ -448,6 +476,10 @@ def main(): module.params.get("device_group_name") is None: valid_device = device_validation(module, rest_obj) + # exit if running in check mode + if module.check_mode: + module.exit_json(msg=CHANGES_FOUND, changed=True) + # extract log job operation response = 
extract_log_operation(module, rest_obj, device_lst=valid_device) message = "Export log job submitted successfully." diff --git a/plugins/modules/ome_profile.py b/plugins/modules/ome_profile.py index eac0f3118..d2f7a87c8 100644 --- a/plugins/modules/ome_profile.py +++ b/plugins/modules/ome_profile.py @@ -3,7 +3,7 @@ # # Dell EMC OpenManage Ansible Modules -# Version 5.0.1 +# Version 5.2.0 # Copyright (C) 2021-2022 Dell Inc. or its subsidiaries. All Rights Reserved. # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) @@ -147,6 +147,8 @@ description: - List of attributes to be modified, when I(command) is C(modify). - List of attributes to be overridden when I(command) is C(assign). + - "Use the I(Id) If the attribute Id is available. If not, use the comma separated I (DisplayName). + For more details about using the I(DisplayName), see the example provided." type: list elements: dict Options: @@ -164,7 +166,7 @@ author: "Jagadeesh N V (@jagadeeshnv)" notes: - Run this module from a system that has direct access to DellEMC OpenManage Enterprise. - - This module does not support C(check_mode). + - This module supports C(check_mode). - C(assign) operation on a already assigned profile will not redeploy. ''' @@ -237,10 +239,15 @@ Attributes: - Id: 4506 Value: "server attr 1" - IsIgnored: true + IsIgnored: false - Id: 4507 Value: "server attr 2" - IsIgnored: true + IsIgnored: false + # Enter the comma separated string as appearing in the Detailed view on GUI + # System -> Server Topology -> ServerTopology 1 Aisle Name + - DisplayName: 'System, Server Topology, ServerTopology 1 Aisle Name' + Value: Aisle 5 + IsIgnored: false - name: Delete a profile using profile name dellemc.openmanage.ome_profile: @@ -401,7 +408,11 @@ DEVICE_VIEW = "DeviceService/Devices" JOB_URI = "JobService/Jobs({job_id})" PROFILE_ACTION = "ProfileService/Actions/ProfileService.{action}" +PROFILE_ATTRIBUTES = "ProfileService/Profiles({profile_id})/AttributeDetails" PROFILE_NOT_FOUND = "Profile with the name '{name}' not found." +CHANGES_MSG = "Changes found to be applied." +NO_CHANGES_MSG = "No changes found to be applied." 
+SEPRTR = ',' def get_template_details(module, rest_obj): @@ -477,6 +488,71 @@ def get_network_iso_payload(module): return iso_payload +def recurse_subattr_list(subgroup, prefix, attr_detailed, attr_map, adv_list): + if isinstance(subgroup, list): + for each_sub in subgroup: + nprfx = "{0}{1}{2}".format(prefix, SEPRTR, each_sub.get("DisplayName")) + if each_sub.get("SubAttributeGroups"): + recurse_subattr_list(each_sub.get("SubAttributeGroups"), nprfx, attr_detailed, attr_map, adv_list) + else: + for attr in each_sub.get('Attributes'): + attr['prefix'] = nprfx + # case sensitive, remove whitespaces for optim + constr = "{0}{1}{2}".format(nprfx, SEPRTR, attr['DisplayName']) + if constr in adv_list: + attr_detailed[constr] = attr['AttributeId'] + attr_map[attr['AttributeId']] = attr + + +def get_subattr_all(attr_dtls, adv_list): + attr_detailed = {} + attr_map = {} + for each in attr_dtls: + recurse_subattr_list(each.get('SubAttributeGroups'), each.get('DisplayName'), attr_detailed, attr_map, adv_list) + return attr_detailed, attr_map + + +def attributes_check(module, rest_obj, inp_attr, profile_id): + diff = 0 + try: + resp = rest_obj.invoke_request("GET", PROFILE_ATTRIBUTES.format(profile_id=profile_id)) + attr_dtls = resp.json_data + disp_adv_list = inp_attr.get("Attributes", {}) + adv_list = [] + for attr in disp_adv_list: + if attr.get("DisplayName"): + split_k = str(attr.get("DisplayName")).split(SEPRTR) + trimmed = map(str.strip, split_k) + n_k = SEPRTR.join(trimmed) + adv_list.append(n_k) + attr_detailed, attr_map = get_subattr_all(attr_dtls.get('AttributeGroups'), adv_list) + payload_attr = inp_attr.get("Attributes", []) + rem_attrs = [] + for attr in payload_attr: + if attr.get("DisplayName"): + split_k = str(attr.get("DisplayName")).split(SEPRTR) + trimmed = map(str.strip, split_k) + n_k = SEPRTR.join(trimmed) + id = attr_detailed.get(n_k, "") + attr['Id'] = id + attr.pop("DisplayName", None) + else: + id = attr.get('Id') + if id: + ex_val = attr_map.get(id, {}) + if not ex_val: + rem_attrs.append(attr) + continue + if attr.get('Value') != ex_val.get("Value") or attr.get('IsIgnored') != ex_val.get("IsIgnored"): + diff = diff + 1 + for rem in rem_attrs: + payload_attr.remove(rem) + # module.exit_json(attr_detailed=attr_detailed, inp_attr=disp_adv_list, payload_attr=payload_attr, adv_list=adv_list) + except Exception: + diff = 1 + return diff + + def assign_profile(module, rest_obj): mparam = module.params payload = {} @@ -524,7 +600,10 @@ def assign_profile(module, rest_obj): ad_opts = mparam.get("attributes") for opt in ad_opts_list: if ad_opts and ad_opts.get(opt): + diff = attributes_check(module, rest_obj, ad_opts, prof['Id']) payload[opt] = ad_opts.get(opt) + if module.check_mode: + module.exit_json(msg=CHANGES_MSG, changed=True) resp = rest_obj.invoke_request('POST', PROFILE_ACTION.format(action=action), data=payload) res_dict = {'msg': msg, 'changed': True} if action == 'AssignProfile': @@ -562,6 +641,8 @@ def unassign_profile(module, rest_obj): module.fail_json(msg=PROFILE_NOT_FOUND.format(name=mparam.get('name'))) if mparam.get('filters'): payload = mparam.get('filters') + if module.check_mode: + module.exit_json(msg=CHANGES_MSG, changed=True) msg = "Successfully applied the unassign operation. No job was triggered." 
resp = rest_obj.invoke_request('POST', PROFILE_ACTION.format(action='UnassignProfiles'), data=payload) res_dict = {'msg': msg, 'changed': True} @@ -588,7 +669,8 @@ def create_profile(module, rest_obj): boot_iso_dict = get_network_iso_payload(module) if boot_iso_dict: payload["NetworkBootToIso"] = boot_iso_dict - # module.exit_json(msg=payload) + if module.check_mode: + module.exit_json(msg=CHANGES_MSG, changed=True) resp = rest_obj.invoke_request('POST', PROFILE_VIEW, data=payload) profile_id_list = resp.json_data module.exit_json(msg="Successfully created {0} profile(s).".format(len(profile_id_list)), @@ -615,19 +697,22 @@ def modify_profile(module, rest_obj): if boot_iso_dict: nest_diff = recursive_diff(boot_iso_dict, rdict) if nest_diff: - module.warn(json.dumps(nest_diff)) + # module.warn(json.dumps(nest_diff)) if nest_diff[0]: diff += 1 payload["NetworkBootToIso"] = boot_iso_dict ad_opts = mparam.get("attributes") if ad_opts and ad_opts.get("Attributes"): - payload["Attributes"] = ad_opts.get("Attributes") - diff += 1 + diff = diff + attributes_check(module, rest_obj, ad_opts, prof['Id']) + if ad_opts.get("Attributes"): + payload["Attributes"] = ad_opts.get("Attributes") payload['Id'] = prof['Id'] if diff: + if module.check_mode: + module.exit_json(msg=CHANGES_MSG, changed=True) resp = rest_obj.invoke_request('PUT', PROFILE_VIEW + "({0})".format(payload['Id']), data=payload) module.exit_json(msg="Successfully modified the profile.", changed=True) - module.exit_json(msg="No changes found to be applied.") + module.exit_json(msg=NO_CHANGES_MSG) def delete_profile(module, rest_obj): @@ -637,12 +722,16 @@ def delete_profile(module, rest_obj): if prof: if prof['ProfileState'] > 0: module.fail_json(msg="Profile has to be in an unassigned state for it to be deleted.") + if module.check_mode: + module.exit_json(msg=CHANGES_MSG, changed=True) resp = rest_obj.invoke_request('DELETE', PROFILE_VIEW + "({0})".format(prof['Id'])) module.exit_json(msg="Successfully deleted the profile.", changed=True) else: module.exit_json(msg=PROFILE_NOT_FOUND.format(name=mparam.get('name'))) if mparam.get('filters'): payload = mparam.get('filters') + if module.check_mode: + module.exit_json(msg=CHANGES_MSG, changed=True) resp = rest_obj.invoke_request('POST', PROFILE_ACTION.format(action='Delete'), data=payload) module.exit_json(msg="Successfully completed the delete operation.", changed=True) @@ -658,7 +747,7 @@ def migrate_profile(module, rest_obj): prof = get_profile(rest_obj, module) if prof: if target['Id'] == prof['TargetId']: - module.exit_json(msg="No changes found to be applied.") + module.exit_json(msg=NO_CHANGES_MSG) try: resp = rest_obj.invoke_request('POST', PROFILE_ACTION.format(action='GetInvalidTargetsForAssignProfile'), data={'Id': prof['Id']}) @@ -668,6 +757,8 @@ def migrate_profile(module, rest_obj): resp = None if prof['ProfileState'] == 4: # migrate applicable in deployed state only payload['ProfileId'] = prof['Id'] + if module.check_mode: + module.exit_json(msg=CHANGES_MSG, changed=True) resp = rest_obj.invoke_request('POST', PROFILE_ACTION.format(action='MigrateProfile'), data=payload) msg = "Successfully applied the migrate operation." 
res_dict = {'msg': msg, 'changed': True} @@ -756,7 +847,7 @@ def main(): ['name', 'filters'], ['device_id', 'device_service_tag'], ['template_name', 'template_id']], - supports_check_mode=False) + supports_check_mode=True) try: with RestOME(module.params, req_session=True) as rest_obj: profile_operation(module, rest_obj) diff --git a/plugins/modules/ome_template.py b/plugins/modules/ome_template.py index 5ce3ad188..8c5fa98b3 100644 --- a/plugins/modules/ome_template.py +++ b/plugins/modules/ome_template.py @@ -3,7 +3,7 @@ # # Dell EMC OpenManage Ansible Modules -# Version 5.0.1 +# Version 5.2.0 # Copyright (C) 2019-2022 Dell Inc. or its subsidiaries. All Rights Reserved. # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) @@ -88,7 +88,9 @@ C(modify), C(deploy), C(import), and C(clone) operations. It takes the following attributes. - >- Attributes: List of dictionaries of attributes (if any) to be modified in the deployment template. This is - applicable when I(command) is C(deploy) and C(modify). + applicable when I(command) is C(deploy) and C(modify). Use the I(Id) If the attribute Id is available. + If not, use the comma separated I (DisplayName). For more details about using the I(DisplayName), + see the example provided. - >- Name: Name of the template. This is mandatory when I(command) is C(create), C(import), C(clone), and optional when I(command) is C(modify). @@ -123,7 +125,7 @@ author: "Jagadeesh N V (@jagadeeshnv)" notes: - Run this module from a system that has direct access to DellEMC OpenManage Enterprise. - - This module does not support C(check_mode). + - This module supports C(check_mode). ''' EXAMPLES = r''' @@ -158,6 +160,28 @@ Value: "Test Attribute" IsIgnored: false +- name: Modify template name, description, and attribute using detailed view + dellemc.openmanage.ome_template: + hostname: "192.168.0.1" + username: "username" + password: "password" + ca_path: "/path/to/ca_cert.pem" + command: "modify" + template_id: 12 + attributes: + Name: "New Custom Template" + Description: "Custom Template Description" + Attributes: + # Enter the comma separated string as appearing in the Detailed view on GUI + # NIC -> NIC.Integrated.1-1-1 -> NIC Configuration -> Wake On LAN1 + - DisplayName: 'NIC, NIC.Integrated.1-1-1, NIC Configuration, Wake On LAN' + Value: Enabled + IsIgnored: false + # System -> LCD Configuration -> LCD 1 User Defined String for LCD + - DisplayName: 'System, LCD Configuration, LCD 1 User Defined String for LCD' + Value: LCD str by OMAM + IsIgnored: false + - name: Deploy template on multiple devices dellemc.openmanage.ome_template: hostname: "192.168.0.1" @@ -370,7 +394,7 @@ attributes: Name: "Imported Template Name" Type: 2 - Content: "{{ lookup('ansible.builtin.file.', '/path/to/xmlfile') }}" + Content: "{{ lookup('ansible.builtin.file', '/path/to/xmlfile') }}" - name: "Deploy template and Operating System (OS) on multiple devices." dellemc.openmanage.ome_template: @@ -461,6 +485,14 @@ \nReady\nNo \n\n\nReady \nNo\n\n" +devices_assigned: + description: Mapping of devices with the templates already deployed on them. + returned: I(command) is C(deploy) + type: dict + sample: { + "10362": 28, + "10312": 23 + } error_info: description: Details of the HTTP Error. 
returned: on HTTP error @@ -489,13 +521,31 @@ from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, ome_auth_params from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError from ansible.module_utils.urls import ConnectionError, SSLValidationError +from ansible_collections.dellemc.openmanage.plugins.module_utils.utils import apply_diff_key TEMPLATES_URI = "TemplateService/Templates" TEMPLATE_PATH = "TemplateService/Templates({template_id})" -TEMPALTE_ACTION = "TemplateService/Actions/TemplateService.{op}" +TEMPLATE_ACTION = "TemplateService/Actions/TemplateService.{op}" +TEMPLATE_ATTRIBUTES = "TemplateService/Templates({template_id})/AttributeDetails" DEVICE_URI = "DeviceService/Devices" GROUP_URI = "GroupService/Groups" +PROFILE_URI = "ProfileService/Profiles" +SEPRTR = ',' +NO_CHANGES_MSG = "No changes found to be applied." +CHANGES_FOUND = "Changes found to be applied." +TEMPLATE_NAME_EXISTS = "Template with name '{name}' already exists." +DEPLOY_DEV_ASSIGNED = "The device(s) '{dev}' have been assigned the template(s) '{temp}' " \ + "respectively. Please unassign the profiles from the devices." + + +def get_profiles(rest_obj): + try: + resp = rest_obj.invoke_request('GET', PROFILE_URI) + profile_list = resp.json_data.get("value") + except Exception: + profile_list = [] + return profile_list def get_group_devices_all(rest_obj, uri): @@ -571,35 +621,128 @@ def get_type_id_valid(rest_obj, typeid): if resp.success and resp.json_data.get('value'): tlist = resp.json_data.get('value', []) for xtype in tlist: - if xtype.get('Id') == typeid: # use Name if str is passed + if xtype.get('Id') == typeid: return True return False -def get_create_payload(module_params, deviceid, view_id): +def get_template_by_name(template_name, module, rest_obj): + template = {} + template_path = TEMPLATES_URI + query_param = {"$filter": "Name eq '{0}'".format(template_name)} + template_req = rest_obj.invoke_request("GET", template_path, query_param=query_param) + for each in template_req.json_data.get('value'): + if each['Name'] == template_name: + template = each + break + return template + + +def recurse_subattr_list(subgroup, prefix, attr_detailed, attr_map, adv_list): + if isinstance(subgroup, list): + for each_sub in subgroup: + nprfx = "{0}{1}{2}".format(prefix, SEPRTR, each_sub.get("DisplayName")) + if each_sub.get("SubAttributeGroups"): + recurse_subattr_list(each_sub.get("SubAttributeGroups"), nprfx, attr_detailed, attr_map, adv_list) + else: + for attr in each_sub.get('Attributes'): + attr['prefix'] = nprfx + # case sensitive, remove whitespaces for optim + constr = "{0}{1}{2}".format(nprfx, SEPRTR, attr['DisplayName']) + if constr in adv_list: + attr_detailed[constr] = attr['AttributeId'] + attr_map[attr['AttributeId']] = attr + + +def get_subattr_all(attr_dtls, adv_list): + attr_detailed = {} + attr_map = {} + for each in attr_dtls: + recurse_subattr_list(each.get('SubAttributeGroups'), each.get('DisplayName'), attr_detailed, attr_map, adv_list) + return attr_detailed, attr_map + + +def attributes_check(module, rest_obj, inp_attr, template_id): + diff = 0 + try: + resp = rest_obj.invoke_request("GET", TEMPLATE_ATTRIBUTES.format(template_id=template_id)) + attr_dtls = resp.json_data + disp_adv_list = inp_attr.get("Attributes", {}) + adv_list = [] + for attr in disp_adv_list: + if attr.get("DisplayName"): + split_k = str(attr.get("DisplayName")).split(SEPRTR) + trimmed = map(str.strip, split_k) + n_k = SEPRTR.join(trimmed) + adv_list.append(n_k) + 
attr_detailed, attr_map = get_subattr_all(attr_dtls.get('AttributeGroups'), adv_list) + payload_attr = inp_attr.get("Attributes", []) + rem_attrs = [] + for attr in payload_attr: + if attr.get("DisplayName"): + split_k = str(attr.get("DisplayName")).split(SEPRTR) + trimmed = map(str.strip, split_k) + n_k = SEPRTR.join(trimmed) + id = attr_detailed.get(n_k, "") + attr['Id'] = id + attr.pop("DisplayName", None) + else: + id = attr.get('Id') + if id: + ex_val = attr_map.get(id, {}) + if not ex_val: + rem_attrs.append(attr) + continue + if attr.get('Value') != ex_val.get("Value") or attr.get('IsIgnored') != ex_val.get("IsIgnored"): + diff = diff + 1 + for rem in rem_attrs: + payload_attr.remove(rem) + # module.exit_json(attr_detailed=attr_detailed, inp_attr=disp_adv_list, payload_attr=payload_attr, adv_list=adv_list) + except Exception: + diff = 1 + return diff + + +def get_create_payload(module, rest_obj, deviceid, view_id): create_payload = {"Fqdds": "All", "ViewTypeId": view_id} - if isinstance(module_params.get("attributes"), dict): - attrib_dict = module_params.get("attributes").copy() - typeid = attrib_dict.get("Type") and attrib_dict.get("Type") or attrib_dict.get("TypeId") + attrib_dict = module.params.get("attributes").copy() + if isinstance(attrib_dict, dict): + typeid = attrib_dict.get("Type") if attrib_dict.get("Type") else attrib_dict.get("TypeId") if typeid: create_payload["TypeId"] = typeid attrib_dict.pop("Type", None) # remove if exists as it is not required for create payload create_payload.update(attrib_dict) + template = get_template_by_name(attrib_dict.get("Name"), module, rest_obj) + if template: + module.exit_json(msg=TEMPLATE_NAME_EXISTS.format(name=attrib_dict.get("Name"))) create_payload["SourceDeviceId"] = int(deviceid) return create_payload -def get_modify_payload(module_params, template_id, template_dict): +def get_modify_payload(module, rest_obj, template_dict): modify_payload = {} - if isinstance(module_params.get("attributes"), dict): - modify_payload.update(module_params.get("attributes")) - modify_payload['Id'] = template_id - # Update with old template values - if not modify_payload.get("Name"): - modify_payload["Name"] = template_dict["Name"] - if not modify_payload.get("Description"): - modify_payload["Description"] = template_dict["Description"] + attrib_dict = module.params.get("attributes") + attrib_dict['Id'] = template_dict.get('Id') + modify_payload["Name"] = template_dict["Name"] + diff = 0 + if attrib_dict.get("Name", template_dict["Name"]) != template_dict["Name"]: + template = get_template_by_name(attrib_dict.get("Name"), module, rest_obj) + if template: + module.exit_json(msg=TEMPLATE_NAME_EXISTS.format(name=attrib_dict.get("Name"))) + modify_payload["Name"] = attrib_dict.get("Name") + diff = diff + 1 + modify_payload["Description"] = template_dict["Description"] + diff = diff + apply_diff_key(attrib_dict, modify_payload, ["Description"]) + # check attributes + if attrib_dict.get("Attributes"): + diff = diff + attributes_check(module, rest_obj, attrib_dict, template_dict.get('Id')) + + if not diff: + module.exit_json(msg=NO_CHANGES_MSG) + if isinstance(attrib_dict, dict): + modify_payload.update(attrib_dict) + # module.exit_json(attrib_dict=attrib_dict, modify_payload=modify_payload) return modify_payload @@ -616,9 +759,12 @@ def get_import_payload(module, rest_obj, view_id): attrib_dict = module.params.get("attributes").copy() import_payload = {} import_payload["Name"] = attrib_dict.pop("Name") + template = 
get_template_by_name(import_payload["Name"], module, rest_obj) + if template: + module.exit_json(msg=TEMPLATE_NAME_EXISTS.format(name=import_payload["Name"])) import_payload["ViewTypeId"] = view_id import_payload["Type"] = 2 - typeid = attrib_dict.get("Type") and attrib_dict.get("Type") or attrib_dict.get("TypeId") + typeid = attrib_dict.get("Type") if attrib_dict.get("Type") else attrib_dict.get("TypeId") if typeid: if get_type_id_valid(rest_obj, typeid): import_payload["Type"] = typeid # Type is mandatory for import @@ -631,11 +777,14 @@ def get_import_payload(module, rest_obj, view_id): return import_payload -def get_clone_payload(module_params, template_id, view_id): - attrib_dict = module_params.get("attributes").copy() +def get_clone_payload(module, rest_obj, template_id, view_id): + attrib_dict = module.params.get("attributes").copy() clone_payload = {} clone_payload["SourceTemplateId"] = template_id clone_payload["NewTemplateName"] = attrib_dict.pop("Name") + template = get_template_by_name(clone_payload["NewTemplateName"], module, rest_obj) + if template: + module.exit_json(msg=TEMPLATE_NAME_EXISTS.format(name=clone_payload["NewTemplateName"])) clone_payload["ViewTypeId"] = view_id if isinstance(attrib_dict, dict): clone_payload.update(attrib_dict) @@ -652,72 +801,84 @@ def get_template_by_id(module, rest_obj, template_id): " requested template is not present.") -def get_template_by_name(template_name, module, rest_obj): - """Filter out specific template based on name, and it returns template_id. - - :param template_name: string - :param module: dictionary - :param rest_obj: object - :return: template_id: integer - """ - template_id = None - template = None - template_path = TEMPLATES_URI - query_param = {"$filter": "Name eq '{0}'".format(template_name)} - template_req = rest_obj.invoke_request("GET", template_path, query_param=query_param) - for each in template_req.json_data.get('value'): - if each['Name'] == template_name: - template_id = each['Id'] - template = each - break - else: - fail_module(module, msg="Unable to complete the operation because the" - " requested template with name {0} is not present.".format(template_name)) - return template, template_id +def get_template_details(module, rest_obj): + id = module.params.get('template_id') + query_param = {"$filter": "Id eq {0}".format(id)} + srch = 'Id' + if not id: + id = module.params.get('template_name') + query_param = {"$filter": "Name eq '{0}'".format(id)} + srch = 'Name' + template = {} + resp = rest_obj.invoke_request('GET', TEMPLATES_URI, query_param=query_param) + if resp.success and resp.json_data.get('value'): + tlist = resp.json_data.get('value', []) + for xtype in tlist: + if xtype.get(srch) == id: + template = xtype + return template def _get_resource_parameters(module, rest_obj): command = module.params.get("command") rest_method = 'POST' payload = {} - template_id = module.params.get("template_id") - template_name = module.params.get("template_name") - if template_name: - template, template_id = get_template_by_name(template_name, module, rest_obj) - if command not in ["import", "create"] and template_id is None: + template = get_template_details(module, rest_obj) + template_id = template.get('Id') + # template_name = template.get('Name') + if command not in ["import", "create", "delete"] and not template: fail_module(module, msg="Enter a valid template_name or template_id") if command == "create": devid_list = get_device_ids(module, rest_obj) if len(devid_list) != 1: fail_module(module, msg="Create 
template requires only one reference device") view_id = get_view_id(rest_obj, module.params['template_view_type']) - payload = get_create_payload(module.params, devid_list[0], view_id) + payload = get_create_payload(module, rest_obj, devid_list[0], view_id) path = TEMPLATES_URI + elif command == 'import': + view_id = get_view_id(rest_obj, module.params['template_view_type']) + path = TEMPLATE_ACTION.format(op="Import") + payload = get_import_payload(module, rest_obj, view_id) + elif command == "delete": + if not template: + module.exit_json(msg=NO_CHANGES_MSG) + path = TEMPLATE_PATH.format(template_id=template_id) + rest_method = 'DELETE' elif command == "modify": path = TEMPLATE_PATH.format(template_id=template_id) template_dict = get_template_by_id(module, rest_obj, template_id) - payload = get_modify_payload(module.params, template_id, template_dict) + payload = get_modify_payload(module, rest_obj, template_dict) rest_method = 'PUT' - elif command == "delete": - path = TEMPLATE_PATH.format(template_id=template_id) - rest_method = 'DELETE' elif command == "export": - path = TEMPALTE_ACTION.format(op="Export") + path = TEMPLATE_ACTION.format(op="Export") payload = {'TemplateId': template_id} elif command == "deploy": devid_list = get_device_ids(module, rest_obj) if not devid_list: fail_module(module, msg="There are no devices provided for deploy operation") - path = TEMPALTE_ACTION.format(op="Deploy") + profile_list = get_profiles(rest_obj) + dev_temp_map = {} + for prof in profile_list: + target = prof["TargetId"] + if prof["ProfileState"] > 0 and target in devid_list: + if template_id == prof['TemplateId']: # already same template deployed + devid_list.remove(target) + else: + dev_temp_map[prof["TargetId"]] = prof['TemplateId'] + if dev_temp_map: + module.exit_json(devices_assigned=dev_temp_map, + msg=DEPLOY_DEV_ASSIGNED.format(dev=','.join(map(str, dev_temp_map.keys())), + temp=','.join(map(str, dev_temp_map.values())))) + if not devid_list: + module.exit_json(msg=NO_CHANGES_MSG) + path = TEMPLATE_ACTION.format(op="Deploy") payload = get_deploy_payload(module.params, devid_list, template_id) elif command == "clone": view_id = get_view_id(rest_obj, module.params['template_view_type']) - path = TEMPALTE_ACTION.format(op="Clone") - payload = get_clone_payload(module.params, template_id, view_id) - else: - view_id = get_view_id(rest_obj, module.params['template_view_type']) - path = TEMPALTE_ACTION.format(op="Import") - payload = get_import_payload(module, rest_obj, view_id) + path = TEMPLATE_ACTION.format(op="Clone") + payload = get_clone_payload(module, rest_obj, template_id, view_id) + if module.check_mode: + module.exit_json(msg=CHANGES_FOUND, changed=True) return path, payload, rest_method @@ -809,12 +970,13 @@ def main(): ['command', 'deploy', ['device_id', 'device_service_tag', 'device_group_names'], True], ], mutually_exclusive=[["template_id", "template_name"]], - supports_check_mode=False) + supports_check_mode=True) try: _validate_inputs(module) with RestOME(module.params, req_session=True) as rest_obj: path, payload, rest_method = _get_resource_parameters(module, rest_obj) + # module.exit_json(payload=payload, path=path) resp = rest_obj.invoke_request(rest_method, path, data=payload) if resp.success: exit_module(module, resp) diff --git a/tests/README.md b/tests/README.md index 0afa9d64d..f66cdd59d 100644 --- a/tests/README.md +++ b/tests/README.md @@ -26,12 +26,10 @@ You can execute them manually by using any tool of your choice, like `pytest` or * Edit the copied `utils.py` 
to refer the above `compat` package as below: ```python from units.compat import unittest - from units.compat.mock import patch # Replace the above lines in utils.py as below from ansible_collections.dellemc.openmanage.tests.unit.compat import unittest - from ansible_collections.dellemc.openmanage.tests.unit.compat.mock import patch ``` * To install `ansible-test` requirements use ``` diff --git a/tests/unit/plugins/module_utils/test_ome.py b/tests/unit/plugins/module_utils/test_ome.py index 244d42d7c..fc0f0be53 100644 --- a/tests/unit/plugins/module_utils/test_ome.py +++ b/tests/unit/plugins/module_utils/test_ome.py @@ -2,8 +2,8 @@ # # Dell EMC OpenManage Ansible Modules -# Version 2.1 -# Copyright (C) 2019-2020 Dell Inc. +# Version 5.2.0 +# Copyright (C) 2019-2022 Dell Inc. # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) # All rights reserved. Dell, EMC, and other trademarks are trademarks of Dell Inc. or its subsidiaries. @@ -18,7 +18,7 @@ from ansible.module_utils.urls import ConnectionError, SSLValidationError from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME -from ansible_collections.dellemc.openmanage.tests.unit.compat.mock import MagicMock +from mock import MagicMock import json MODULE_UTIL_PATH = 'ansible_collections.dellemc.openmanage.plugins.module_utils.' diff --git a/tests/unit/plugins/modules/common.py b/tests/unit/plugins/modules/common.py index 1631e2ad7..0cc124f9b 100644 --- a/tests/unit/plugins/modules/common.py +++ b/tests/unit/plugins/modules/common.py @@ -2,8 +2,8 @@ # # Dell EMC OpenManage Ansible Modules -# Version 2.1.2 -# Copyright (C) 2019-2020 Dell Inc. +# Version 5.2.0 +# Copyright (C) 2019-2022 Dell Inc. # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) # All rights reserved. Dell, EMC, and other trademarks are trademarks of Dell Inc. or its subsidiaries. @@ -17,7 +17,7 @@ import pytest from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.utils import set_module_args, AnsibleFailJson, \ AnsibleExitJson -from ansible_collections.dellemc.openmanage.tests.unit.compat.mock import MagicMock +from mock import MagicMock import ast diff --git a/tests/unit/plugins/modules/conftest.py b/tests/unit/plugins/modules/conftest.py index 9c06bd6d6..e6f9ae46e 100644 --- a/tests/unit/plugins/modules/conftest.py +++ b/tests/unit/plugins/modules/conftest.py @@ -2,7 +2,7 @@ # # Dell EMC OpenManage Ansible Modules -# Version 5.0.0 +# Version 5.2.0 # Copyright (C) 2019-2022 Dell Inc. # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) @@ -18,7 +18,7 @@ from ansible.module_utils import basic from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.utils import set_module_args, exit_json, \ fail_json, AnsibleFailJson, AnsibleExitJson -from ansible_collections.dellemc.openmanage.tests.unit.compat.mock import MagicMock +from mock import MagicMock MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.' MODULE_UTIL_PATH = 'ansible_collections.dellemc.openmanage.plugins.module_utils.' 
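Stepping back to the attribute handling added to ome_profile and ome_template earlier in this patch: both modules now resolve a comma-separated DisplayName path against the template or profile AttributeDetails tree to obtain the attribute Id. The sketch below is a simplified, self-contained illustration of that resolution, not the modules' recurse_subattr_list/get_subattr_all implementation, and the nested attribute_groups payload is a hypothetical, trimmed example.

```python
# Simplified illustration of resolving a comma separated DisplayName to an
# attribute Id. The nested payload is hypothetical sample data.

SEPARATOR = ","


def normalize(display_name):
    """Strip whitespace around each comma separated path element."""
    return SEPARATOR.join(part.strip() for part in display_name.split(SEPARATOR))


def build_attribute_index(groups, prefix=""):
    """Map 'Group,SubGroup,...,Attribute DisplayName' strings to attribute Ids."""
    index = {}
    for group in groups:
        path = group["DisplayName"] if not prefix else \
            "{0}{1}{2}".format(prefix, SEPARATOR, group["DisplayName"])
        if group.get("SubAttributeGroups"):
            index.update(build_attribute_index(group["SubAttributeGroups"], path))
        for attr in group.get("Attributes", []):
            index["{0}{1}{2}".format(path, SEPARATOR, attr["DisplayName"])] = attr["AttributeId"]
    return index


if __name__ == "__main__":
    attribute_groups = [
        {"DisplayName": "System",
         "Attributes": [],
         "SubAttributeGroups": [
             {"DisplayName": "LCD Configuration",
              "SubAttributeGroups": [],
              "Attributes": [{"DisplayName": "LCD 1 User Defined String for LCD",
                              "AttributeId": 4506}]}]},
    ]
    index = build_attribute_index(attribute_groups)
    key = normalize("System, LCD Configuration , LCD 1 User Defined String for LCD")
    print(index[key])  # 4506
```

Normalizing on the comma separator is what lets a playbook author copy the path exactly as it appears in the GUI detailed view, with or without surrounding spaces.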
diff --git a/tests/unit/plugins/modules/test_dellemc_configure_idrac_eventing.py b/tests/unit/plugins/modules/test_dellemc_configure_idrac_eventing.py index de152b187..891719759 100644 --- a/tests/unit/plugins/modules/test_dellemc_configure_idrac_eventing.py +++ b/tests/unit/plugins/modules/test_dellemc_configure_idrac_eventing.py @@ -2,8 +2,8 @@ # # Dell EMC OpenManage Ansible Modules -# Version 3.0.0 -# Copyright (C) 2020-2021 Dell Inc. or its subsidiaries. All Rights Reserved. +# Version 5.2.0 +# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved. # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) # @@ -15,7 +15,7 @@ import pytest from ansible_collections.dellemc.openmanage.plugins.modules import dellemc_configure_idrac_eventing from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants -from ansible_collections.dellemc.openmanage.tests.unit.compat.mock import MagicMock, patch, Mock, PropertyMock +from mock import MagicMock, patch, Mock, PropertyMock from pytest import importorskip importorskip("omsdk.sdkfile") diff --git a/tests/unit/plugins/modules/test_dellemc_configure_idrac_services.py b/tests/unit/plugins/modules/test_dellemc_configure_idrac_services.py index 8b7346fa3..274f4a40a 100644 --- a/tests/unit/plugins/modules/test_dellemc_configure_idrac_services.py +++ b/tests/unit/plugins/modules/test_dellemc_configure_idrac_services.py @@ -2,8 +2,8 @@ # # Dell EMC OpenManage Ansible Modules -# Version 3.0.0 -# Copyright (C) 2020-2021 Dell Inc. or its subsidiaries. All Rights Reserved. +# Version 5.2.0 +# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved. # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) # @@ -15,7 +15,7 @@ import pytest from ansible_collections.dellemc.openmanage.plugins.modules import dellemc_configure_idrac_services from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants -from ansible_collections.dellemc.openmanage.tests.unit.compat.mock import MagicMock, patch, Mock +from mock import MagicMock, patch, Mock from pytest import importorskip importorskip("omsdk.sdkfile") diff --git a/tests/unit/plugins/modules/test_dellemc_get_firmware_inventory.py b/tests/unit/plugins/modules/test_dellemc_get_firmware_inventory.py index 53c67938a..657f89e49 100644 --- a/tests/unit/plugins/modules/test_dellemc_get_firmware_inventory.py +++ b/tests/unit/plugins/modules/test_dellemc_get_firmware_inventory.py @@ -2,8 +2,8 @@ # # Dell EMC OpenManage Ansible Modules -# Version 2.1.4 -# Copyright (C) 2020 Dell Inc. or its subsidiaries. All Rights Reserved. +# Version 5.2.0 +# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved. 
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) # @@ -15,7 +15,7 @@ import pytest from ansible_collections.dellemc.openmanage.plugins.modules import dellemc_get_firmware_inventory from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants -from ansible_collections.dellemc.openmanage.tests.unit.compat.mock import MagicMock, PropertyMock +from mock import MagicMock, PropertyMock from pytest import importorskip importorskip("omsdk.sdkfile") diff --git a/tests/unit/plugins/modules/test_dellemc_get_system_inventory.py b/tests/unit/plugins/modules/test_dellemc_get_system_inventory.py index 39f8a91e7..c398c9f8a 100644 --- a/tests/unit/plugins/modules/test_dellemc_get_system_inventory.py +++ b/tests/unit/plugins/modules/test_dellemc_get_system_inventory.py @@ -2,8 +2,8 @@ # # Dell EMC OpenManage Ansible Modules -# Version 2.1.1 -# Copyright (C) 2020 Dell Inc. or its subsidiaries. All Rights Reserved. +# Version 5.2.0 +# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved. # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) # @@ -15,7 +15,7 @@ import pytest from ansible_collections.dellemc.openmanage.plugins.modules import dellemc_get_system_inventory from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants -from ansible_collections.dellemc.openmanage.tests.unit.compat.mock import MagicMock, Mock +from mock import MagicMock, Mock from pytest import importorskip importorskip("omsdk.sdkfile") diff --git a/tests/unit/plugins/modules/test_dellemc_idrac_lc_attributes.py b/tests/unit/plugins/modules/test_dellemc_idrac_lc_attributes.py index bdaf69142..2f2ac720a 100644 --- a/tests/unit/plugins/modules/test_dellemc_idrac_lc_attributes.py +++ b/tests/unit/plugins/modules/test_dellemc_idrac_lc_attributes.py @@ -2,8 +2,8 @@ # # Dell EMC OpenManage Ansible Modules -# Version 2.1.4 -# Copyright (C) 2020 Dell Inc. or its subsidiaries. All Rights Reserved. +# Version 5.2.0 +# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved. # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) # @@ -15,7 +15,7 @@ import pytest from ansible_collections.dellemc.openmanage.plugins.modules import dellemc_idrac_lc_attributes from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants -from ansible_collections.dellemc.openmanage.tests.unit.compat.mock import MagicMock, patch, Mock +from mock import MagicMock, patch, Mock from pytest import importorskip importorskip("omsdk.sdkfile") diff --git a/tests/unit/plugins/modules/test_dellemc_idrac_storage_volume.py b/tests/unit/plugins/modules/test_dellemc_idrac_storage_volume.py index 88bcc3a37..c3a0dff19 100644 --- a/tests/unit/plugins/modules/test_dellemc_idrac_storage_volume.py +++ b/tests/unit/plugins/modules/test_dellemc_idrac_storage_volume.py @@ -2,8 +2,8 @@ # # Dell EMC OpenManage Ansible Modules -# Version 2.1.4 -# Copyright (C) 2020 Dell Inc. or its subsidiaries. All Rights Reserved. +# Version 5.2.0 +# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved. 
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) # @@ -16,7 +16,7 @@ import os from ansible_collections.dellemc.openmanage.plugins.modules import dellemc_idrac_storage_volume from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants -from ansible_collections.dellemc.openmanage.tests.unit.compat.mock import MagicMock, patch, Mock +from mock import MagicMock, patch, Mock from pytest import importorskip importorskip("omsdk.sdkfile") diff --git a/tests/unit/plugins/modules/test_dellemc_system_lockdown_mode.py b/tests/unit/plugins/modules/test_dellemc_system_lockdown_mode.py index cb658beec..eb733694e 100644 --- a/tests/unit/plugins/modules/test_dellemc_system_lockdown_mode.py +++ b/tests/unit/plugins/modules/test_dellemc_system_lockdown_mode.py @@ -2,8 +2,8 @@ # # Dell EMC OpenManage Ansible Modules -# Version 3.0.0 -# Copyright (C) 2020-2021 Dell Inc. or its subsidiaries. All Rights Reserved. +# Version 5.2.0 +# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved. # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) # @@ -15,7 +15,7 @@ import pytest from ansible_collections.dellemc.openmanage.plugins.modules import dellemc_system_lockdown_mode from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants -from ansible_collections.dellemc.openmanage.tests.unit.compat.mock import MagicMock, Mock +from mock import MagicMock, Mock from pytest import importorskip importorskip("omsdk.sdkfile") diff --git a/tests/unit/plugins/modules/test_idrac_bios.py b/tests/unit/plugins/modules/test_idrac_bios.py index 4bf6853dc..157b67b1d 100644 --- a/tests/unit/plugins/modules/test_idrac_bios.py +++ b/tests/unit/plugins/modules/test_idrac_bios.py @@ -2,8 +2,8 @@ # # Dell EMC OpenManage Ansible Modules -# Version 3.0.0 -# Copyright (C) 2018-2020 Dell Inc. or its subsidiaries. All Rights Reserved. +# Version 5.2.0 +# Copyright (C) 2018-2022 Dell Inc. or its subsidiaries. All Rights Reserved. # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) # @@ -16,8 +16,8 @@ import json from ansible_collections.dellemc.openmanage.plugins.modules import idrac_bios from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants -from ansible_collections.dellemc.openmanage.tests.unit.compat.mock import MagicMock, patch, Mock -from ansible_collections.dellemc.openmanage.tests.unit.compat.mock import PropertyMock +from mock import MagicMock, patch, Mock +from mock import PropertyMock from io import StringIO from ansible.module_utils._text import to_text from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError diff --git a/tests/unit/plugins/modules/test_idrac_firmware.py b/tests/unit/plugins/modules/test_idrac_firmware.py index 856c7e314..c30ce409e 100644 --- a/tests/unit/plugins/modules/test_idrac_firmware.py +++ b/tests/unit/plugins/modules/test_idrac_firmware.py @@ -2,8 +2,8 @@ # # Dell EMC OpenManage Ansible Modules -# Version 5.1.0 -# Copyright (C) 2020-2021 Dell Inc. or its subsidiaries. All Rights Reserved. +# Version 5.2.0 +# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved. 
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) # @@ -18,7 +18,7 @@ from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError from ansible.module_utils.urls import ConnectionError, SSLValidationError -from ansible_collections.dellemc.openmanage.tests.unit.compat.mock import MagicMock, patch, Mock +from mock import MagicMock, patch, Mock from io import StringIO from ansible.module_utils._text import to_text from ansible.module_utils.six.moves.urllib.parse import urlparse, ParseResult diff --git a/tests/unit/plugins/modules/test_idrac_firmware_info.py b/tests/unit/plugins/modules/test_idrac_firmware_info.py index cacd4f5ba..787dba2c7 100644 --- a/tests/unit/plugins/modules/test_idrac_firmware_info.py +++ b/tests/unit/plugins/modules/test_idrac_firmware_info.py @@ -2,8 +2,8 @@ # # Dell EMC OpenManage Ansible Modules -# Version 3.0.0 -# Copyright (C) 2021 Dell Inc. or its subsidiaries. All Rights Reserved. +# Version 5.2.0 +# Copyright (C) 2021-2022 Dell Inc. or its subsidiaries. All Rights Reserved. # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) # @@ -16,7 +16,7 @@ import json from ansible_collections.dellemc.openmanage.plugins.modules import idrac_firmware_info from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants -from ansible_collections.dellemc.openmanage.tests.unit.compat.mock import MagicMock, PropertyMock +from mock import MagicMock, PropertyMock from pytest import importorskip from ansible.module_utils.urls import ConnectionError, SSLValidationError from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError diff --git a/tests/unit/plugins/modules/test_idrac_lifecycle_controller_job_status_info.py b/tests/unit/plugins/modules/test_idrac_lifecycle_controller_job_status_info.py index 8af5891d7..39df4e4c6 100644 --- a/tests/unit/plugins/modules/test_idrac_lifecycle_controller_job_status_info.py +++ b/tests/unit/plugins/modules/test_idrac_lifecycle_controller_job_status_info.py @@ -2,8 +2,8 @@ # # Dell EMC OpenManage Ansible Modules -# Version 2.1.1 -# Copyright (C) 2020 Dell Inc. or its subsidiaries. All Rights Reserved. +# Version 5.2.0 +# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved. # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) # @@ -15,7 +15,7 @@ import json from ansible_collections.dellemc.openmanage.plugins.modules import idrac_lifecycle_controller_job_status_info from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants -from ansible_collections.dellemc.openmanage.tests.unit.compat.mock import MagicMock, PropertyMock +from mock import MagicMock, PropertyMock from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError from ansible.module_utils.urls import ConnectionError, SSLValidationError from io import StringIO diff --git a/tests/unit/plugins/modules/test_idrac_lifecycle_controller_jobs.py b/tests/unit/plugins/modules/test_idrac_lifecycle_controller_jobs.py index 077c5bf9e..491932673 100644 --- a/tests/unit/plugins/modules/test_idrac_lifecycle_controller_jobs.py +++ b/tests/unit/plugins/modules/test_idrac_lifecycle_controller_jobs.py @@ -2,8 +2,8 @@ # # Dell EMC OpenManage Ansible Modules -# Version 2.1.1 -# Copyright (C) 2020 Dell Inc. 
or its subsidiaries. All Rights Reserved. +# Version 5.2.0 +# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved. # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) # @@ -18,7 +18,7 @@ from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError from ansible.module_utils.urls import ConnectionError, SSLValidationError -from ansible_collections.dellemc.openmanage.tests.unit.compat.mock import MagicMock, PropertyMock +from mock import MagicMock, PropertyMock from io import StringIO from ansible.module_utils._text import to_text from pytest import importorskip diff --git a/tests/unit/plugins/modules/test_idrac_lifecycle_controller_logs.py b/tests/unit/plugins/modules/test_idrac_lifecycle_controller_logs.py index 658b8588e..c1a0894e2 100644 --- a/tests/unit/plugins/modules/test_idrac_lifecycle_controller_logs.py +++ b/tests/unit/plugins/modules/test_idrac_lifecycle_controller_logs.py @@ -2,8 +2,8 @@ # # Dell EMC OpenManage Ansible Modules -# Version 3.0.0 -# Copyright (C) 2020-2021 Dell Inc. or its subsidiaries. All Rights Reserved. +# Version 5.2.0 +# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved. # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) # @@ -15,7 +15,7 @@ import json from ansible_collections.dellemc.openmanage.plugins.modules import idrac_lifecycle_controller_logs from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants -from ansible_collections.dellemc.openmanage.tests.unit.compat.mock import MagicMock, patch, Mock +from mock import MagicMock, patch, Mock from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError from ansible.module_utils.urls import ConnectionError, SSLValidationError from io import StringIO diff --git a/tests/unit/plugins/modules/test_idrac_lifecycle_controller_status_info.py b/tests/unit/plugins/modules/test_idrac_lifecycle_controller_status_info.py index 17151d50b..d00e2bc06 100644 --- a/tests/unit/plugins/modules/test_idrac_lifecycle_controller_status_info.py +++ b/tests/unit/plugins/modules/test_idrac_lifecycle_controller_status_info.py @@ -2,8 +2,8 @@ # # Dell EMC OpenManage Ansible Modules -# Version 3.0.0 -# Copyright (C) 2020-2021 Dell Inc. or its subsidiaries. All Rights Reserved. +# Version 5.2.0 +# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved. 
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) # @@ -15,10 +15,10 @@ import json from ansible_collections.dellemc.openmanage.plugins.modules import idrac_lifecycle_controller_status_info from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants -from ansible_collections.dellemc.openmanage.tests.unit.compat.mock import MagicMock, patch, Mock +from mock import MagicMock, patch, Mock from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError from ansible.module_utils.urls import ConnectionError, SSLValidationError -from ansible_collections.dellemc.openmanage.tests.unit.compat.mock import PropertyMock +from mock import PropertyMock from io import StringIO from ansible.module_utils._text import to_text from pytest import importorskip diff --git a/tests/unit/plugins/modules/test_idrac_network.py b/tests/unit/plugins/modules/test_idrac_network.py index 87e9cf919..1fba52d40 100644 --- a/tests/unit/plugins/modules/test_idrac_network.py +++ b/tests/unit/plugins/modules/test_idrac_network.py @@ -2,8 +2,8 @@ # # Dell EMC OpenManage Ansible Modules -# Version 3.0.0 -# Copyright (C) 2018-2021 Dell Inc. or its subsidiaries. All Rights Reserved. +# Version 5.2.0 +# Copyright (C) 2018-2022 Dell Inc. or its subsidiaries. All Rights Reserved. # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) # @@ -16,7 +16,7 @@ import json from ansible_collections.dellemc.openmanage.plugins.modules import idrac_network from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants -from ansible_collections.dellemc.openmanage.tests.unit.compat.mock import MagicMock, patch, Mock +from mock import MagicMock, patch, Mock from io import StringIO from ansible.module_utils._text import to_text from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError diff --git a/tests/unit/plugins/modules/test_idrac_os_deployment.py b/tests/unit/plugins/modules/test_idrac_os_deployment.py index f5f19a240..d89673566 100644 --- a/tests/unit/plugins/modules/test_idrac_os_deployment.py +++ b/tests/unit/plugins/modules/test_idrac_os_deployment.py @@ -2,7 +2,7 @@ # # Dell EMC OpenManage Ansible Modules -# Version 5.0.0 +# Version 5.2.0 # Copyright (C) 2019-2022 Dell Inc. or its subsidiaries. All Rights Reserved. # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) @@ -15,7 +15,7 @@ import pytest from ansible_collections.dellemc.openmanage.plugins.modules import idrac_os_deployment from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants -from ansible_collections.dellemc.openmanage.tests.unit.compat.mock import MagicMock +from mock import MagicMock from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.utils import set_module_args, exit_json, \ fail_json, AnsibleFailJson, AnsibleExitJson from pytest import importorskip diff --git a/tests/unit/plugins/modules/test_idrac_redfish_storage_controller.py b/tests/unit/plugins/modules/test_idrac_redfish_storage_controller.py index babbb8845..a4ac5bc78 100644 --- a/tests/unit/plugins/modules/test_idrac_redfish_storage_controller.py +++ b/tests/unit/plugins/modules/test_idrac_redfish_storage_controller.py @@ -2,8 +2,8 @@ # # Dell EMC OpenManage Ansible Modules -# Version 2.1.3 -# Copyright (C) 2019-2020 Dell Inc. or its subsidiaries. All Rights Reserved. 
+# Version 5.2.0 +# Copyright (C) 2019-2022 Dell Inc. or its subsidiaries. All Rights Reserved. # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) # @@ -16,7 +16,7 @@ import json from ansible_collections.dellemc.openmanage.plugins.modules import idrac_redfish_storage_controller from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants -from ansible.module_utils.six.moves.urllib.error import HTTPError +from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError from ansible.module_utils.urls import ConnectionError, SSLValidationError from io import StringIO from ansible.module_utils._text import to_text @@ -26,7 +26,7 @@ @pytest.fixture -def idrac_connection_mock_for_redfish_storage_controller(mocker, redfish_response_mock): +def redfish_str_controller_conn(mocker, redfish_response_mock): connection_class_mock = mocker.patch( MODULE_PATH + 'idrac_redfish_storage_controller.Redfish') idrac_redfish_connection_mock_obj = connection_class_mock.return_value.__enter__.return_value @@ -37,243 +37,260 @@ def idrac_connection_mock_for_redfish_storage_controller(mocker, redfish_respons class TestIdracRedfishStorageController(FakeAnsibleModule): module = idrac_redfish_storage_controller - msg = "All of the following: key, key_id and old_key are required for ReKey operation." + def test_check_id_exists(self, redfish_str_controller_conn, redfish_response_mock): + param = {"baseuri": "192.168.0.1", "username": "username", "password": "password"} + uri = "/redfish/v1/Dell/Systems/{system_id}/Storage/DellController/{controller_id}" + f_module = self.get_module_mock(params=param) + redfish_response_mock.success = True + redfish_response_mock.status_code = 200 + result = self.module.check_id_exists(f_module, redfish_str_controller_conn, "controller_id", + "RAID.Integrated.1-1", uri) + assert result is None + redfish_response_mock.success = False + redfish_response_mock.status_code = 400 + with pytest.raises(Exception) as ex: + self.module.check_id_exists(f_module, redfish_str_controller_conn, "controller_id", + "RAID.Integrated.1-1", uri) + assert ex.value.args[0] == "controller_id with id 'RAID.Integrated.1-1' not found in system" - @pytest.mark.parametrize("input", - [{"param": {"command": "ReKey", "mode": "LKM", "key_id": "myid"}, "msg": msg}, - {"param": {"command": "ReKey", "mode": "LKM", "old_key": "mykey"}, "msg": msg}, - {"param": {"command": "ReKey", "mode": "LKM", "key": "mykey"}, "msg": msg} - ]) - def test_validate_inputs_error_case_01(self, input): - f_module = self.get_module_mock(params=input["param"]) - with pytest.raises(Exception) as exc: + def test_validate_inputs(self, redfish_str_controller_conn, redfish_response_mock): + param = {"baseuri": "192.168.0.1", "username": "username", "password": "password", + "command": "ReKey", "mode": "LKM"} + f_module = self.get_module_mock(params=param) + with pytest.raises(Exception) as ex: + self.module.validate_inputs(f_module) + assert ex.value.args[0] == "All of the following: key, key_id and old_key are required for 'ReKey' operation." 
+ param.update({"command": "AssignSpare", "target": ["Disk.Bay.0:Enclosure.Internal.0-2:RAID.Integrated.1-1", + "Disk.Bay.1:Enclosure.Internal.0-2:RAID.Integrated.1-1"]}) + f_module = self.get_module_mock(params=param) + with pytest.raises(Exception) as ex: self.module.validate_inputs(f_module) - assert exc.value.args[0] == input["msg"] + assert ex.value.args[0] == "The Fully Qualified Device Descriptor (FQDD) of the target " \ + "physical disk must be only one." + param.update({"volume": ["Disk.Virtual.0:RAID.Mezzanine.1C-0", + "Disk.Virtual.0:RAID.Mezzanine.1C-1"], "target": None}) + with pytest.raises(Exception) as ex: + self.module.validate_inputs(f_module) + assert ex.value.args[0] == "The Fully Qualified Device Descriptor (FQDD) of the target " \ + "virtual drive must be only one." + param.update({"command": "EnableControllerEncryption"}) + f_module = self.get_module_mock(params=param) + with pytest.raises(Exception) as ex: + self.module.validate_inputs(f_module) + assert ex.value.args[0] == "All of the following: key, key_id are " \ + "required for 'EnableControllerEncryption' operation." + param.update({"command": "ChangePDStateToOnline", + "target": ["Disk.Bay.0:Enclosure.Internal.0-2:RAID.Integrated.1-1", + "Disk.Bay.0:Enclosure.Internal.0-2:RAID.Integrated.1-1"]}) + with pytest.raises(Exception) as ex: + self.module.validate_inputs(f_module) + assert ex.value.args[0] == "The Fully Qualified Device Descriptor (FQDD) of the target " \ + "physical disk must be only one." - @pytest.mark.parametrize("input", [{"controller_id": "c1"}]) - def test_check_encryption_capability_failure(self, idrac_connection_mock_for_redfish_storage_controller, - redfish_response_mock, input): - f_module = self.get_module_mock(params=input) - msg = "Encryption is not supported on the storage controller: c1" + def test_target_identify_pattern(self, redfish_str_controller_conn, redfish_response_mock): + param = {"baseuri": "192.168.0.1", "username": "username", "password": "password", + "command": "BlinkTarget", "target": "Disk.Bay.1:Enclosure.Internal.0-0:RAID.Mezzanine.1C-1", + "volume_id": "Disk.Virtual.0:RAID.Mezzanine.1C-1"} + f_module = self.get_module_mock(params=param) redfish_response_mock.success = True - redfish_response_mock.json_data = { - 'Oem': {'Dell': {'DellController': {'SecurityStatus': "EncryptionNotCapable"}}}} - with pytest.raises(Exception) as exc: - self.module.check_encryption_capability(f_module, idrac_connection_mock_for_redfish_storage_controller) - assert exc.value.args[0] == msg + redfish_response_mock.status_code = 200 + result = self.module.target_identify_pattern(f_module, redfish_str_controller_conn) + assert result.status_code == 200 + f_module.check_mode = True + with pytest.raises(Exception) as ex: + self.module.target_identify_pattern(f_module, redfish_str_controller_conn) + assert ex.value.args[0] == "Changes found to be applied." 
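The reworked tests above all follow the same shape: the Redfish connection class is patched, a mocked response supplies the controller state, and the assertion pins the exact user-facing message. A self-contained, simplified version of that pattern is shown below; check_id_exists here is a hypothetical stand-in rather than the module's function, the URI is a placeholder that is only formatted and never fetched, and note that the collection's tests now import MagicMock from the external `mock` package instead of the bundled compat layer.

```python
# Simplified stand-alone version of the mocked-connection test pattern above.
import pytest
from unittest.mock import MagicMock


def check_id_exists(rest_obj, controller_id, uri):
    # Placeholder logic: fail with the kind of message the real tests assert on.
    resp = rest_obj.invoke_request("GET", uri.format(controller_id=controller_id))
    if not resp.success:
        raise ValueError("controller_id with id '{0}' not found in system".format(controller_id))


def test_check_id_exists_not_found():
    rest_obj = MagicMock()
    rest_obj.invoke_request.return_value = MagicMock(success=False, status_code=400)
    with pytest.raises(ValueError) as ex:
        check_id_exists(rest_obj, "RAID.Integrated.1-1",
                        "/redfish/v1/example/{controller_id}")  # placeholder URI
    assert "RAID.Integrated.1-1" in str(ex.value)
```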
- def test_check_raid_service(self, idrac_connection_mock_for_redfish_storage_controller, - redfish_response_mock): - f_module = self.get_module_mock() - msg = "Installed version of iDRAC does not support this feature using Redfish API" - redfish_response_mock.success = False - with pytest.raises(Exception) as exc: - self.module.check_raid_service(f_module, idrac_connection_mock_for_redfish_storage_controller) - assert exc.value.args[0] == msg - - @pytest.mark.parametrize("input", - [ - {"error": urllib_error.URLError("TESTS")} - ]) - def test_check_raid_service_exceptions(self, idrac_connection_mock_for_redfish_storage_controller, input): - f_module = self.get_module_mock(params=input) - idrac_connection_mock_for_redfish_storage_controller.invoke_request.side_effect = input["error"] - with pytest.raises(Exception) as exc: - self.module.check_raid_service(f_module, idrac_connection_mock_for_redfish_storage_controller) - assert "TESTS" in exc.value.args[0] + def test_ctrl_reset_config(self, redfish_str_controller_conn, redfish_response_mock, mocker): + param = {"baseuri": "192.168.0.1", "username": "username", "password": "password", + "controller_id": "RAID.Mezzanine.1C-1", "command": "ResetConfig"} + f_module = self.get_module_mock(params=param) + mocker.patch(MODULE_PATH + "idrac_redfish_storage_controller.check_id_exists", return_value=None) + redfish_str_controller_conn.json_data = {"Members": ["virtual_drive"]} + redfish_response_mock.headers = {"Location": "/redfish/v1/Managers/iDRAC.Embedded.1/Jobs/JID_XXXXXXXXXXXXX"} + result = self.module.ctrl_reset_config(f_module, redfish_str_controller_conn) + assert result[2] == "JID_XXXXXXXXXXXXX" + f_module.check_mode = True + with pytest.raises(Exception) as ex: + self.module.ctrl_reset_config(f_module, redfish_str_controller_conn) + assert ex.value.args[0] == "Changes found to be applied." + redfish_response_mock.json_data = {"Members": []} + with pytest.raises(Exception) as ex: + self.module.ctrl_reset_config(f_module, redfish_str_controller_conn) + assert ex.value.args[0] == "No changes found to be applied." 
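The "Changes found" / "No changes found" assertions above exercise the check-mode and idempotency contract that this release adds across the modules. A minimal sketch of that contract follows, using the hot-spare case as the example; the message strings are taken from the assertions, while apply_change is a hypothetical placeholder for the POST that returns a job id in the real module.

```python
# Minimal sketch of the check-mode/idempotency decision the tests pin down.

CHANGES = "Changes found to be applied."
NO_CHANGES = "No changes found to be applied."


def hot_spare_decision(current_hotspare_type, check_mode, apply_change):
    if current_hotspare_type != "None":   # already a hot spare: idempotent no-op
        return NO_CHANGES
    if check_mode:                        # a change is needed, but only report it
        return CHANGES
    return apply_change()                 # real run: trigger the job


if __name__ == "__main__":
    print(hot_spare_decision("Global", False, lambda: "JID_XXXXXXXXXXXXX"))  # No changes found to be applied.
    print(hot_spare_decision("None", True, lambda: "JID_XXXXXXXXXXXXX"))     # Changes found to be applied.
    print(hot_spare_decision("None", False, lambda: "JID_XXXXXXXXXXXXX"))    # JID_XXXXXXXXXXXXX
```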
- def test_check_raid_service_HttpError_exception(self, idrac_connection_mock_for_redfish_storage_controller, - redfish_default_args): - f_module = self.get_module_mock(params=redfish_default_args) - json_str = to_text(json.dumps({"data": "out"})) - idrac_connection_mock_for_redfish_storage_controller.invoke_request.side_effect = HTTPError( - 'http://testhost.com', 400, 'http error message', - {"accept-type": "application/json"}, StringIO(json_str)) - with pytest.raises(Exception) as exc: - self.module.check_raid_service(f_module, idrac_connection_mock_for_redfish_storage_controller) - assert exc.value.args[0] == "Installed version of iDRAC does not support this feature using Redfish API" + def test_hot_spare_config(self, redfish_str_controller_conn, redfish_response_mock): + param = {"baseuri": "192.168.0.1", "username": "username", "password": "password", + "command": "AssignSpare", "target": "Disk.Bay.1:Enclosure.Internal.0-2:RAID.Integrated.1-1"} + f_module = self.get_module_mock(params=param) + redfish_response_mock.json_data = {"HotspareType": "None"} + redfish_response_mock.headers = {"Location": "/redfish/v1/Managers/iDRAC.Embedded.1/Jobs/JID_XXXXXXXXXXXXX"} + result = self.module.hot_spare_config(f_module, redfish_str_controller_conn) + assert result[2] == "JID_XXXXXXXXXXXXX" + f_module.check_mode = True + with pytest.raises(Exception) as ex: + self.module.hot_spare_config(f_module, redfish_str_controller_conn) + assert ex.value.args[0] == "Changes found to be applied." + redfish_response_mock.json_data = {"HotspareType": "Global"} + with pytest.raises(Exception) as ex: + self.module.hot_spare_config(f_module, redfish_str_controller_conn) + assert ex.value.args[0] == "No changes found to be applied." - @pytest.mark.parametrize("input", [{"volume_id": ["v1"]}]) - def test_check_volume_array_exists(self, idrac_connection_mock_for_redfish_storage_controller, - redfish_response_mock, input): - f_module = self.get_module_mock(params=input) - msg = "Unable to locate the virtual disk with the ID: v1" - redfish_response_mock.success = False - with pytest.raises(Exception) as exc: - self.module.check_volume_array_exists(f_module, - idrac_connection_mock_for_redfish_storage_controller) - assert exc.value.args[0] == msg + def test_ctrl_key(self, redfish_str_controller_conn, redfish_response_mock, mocker): + param = {"baseuri": "192.168.0.1", "username": "username", "password": "password", + "command": "SetControllerKey", "controller_id": "RAID.Integrated.1-1", "mode": "LKM"} + mocker.patch(MODULE_PATH + "idrac_redfish_storage_controller.check_id_exists", return_value=None) + f_module = self.get_module_mock(params=param) + redfish_response_mock.json_data = {"SecurityStatus": "EncryptionNotCapable", "KeyID": None} + with pytest.raises(Exception) as ex: + self.module.ctrl_key(f_module, redfish_str_controller_conn) + assert ex.value.args[0] == "The storage controller 'RAID.Integrated.1-1' does not support encryption." + f_module.check_mode = True + redfish_response_mock.json_data = {"SecurityStatus": "EncryptionCapable", "KeyID": None} + with pytest.raises(Exception) as ex: + self.module.ctrl_key(f_module, redfish_str_controller_conn) + assert ex.value.args[0] == "Changes found to be applied." + redfish_response_mock.json_data = {"SecurityStatus": "EncryptionCapable", "KeyID": "Key@123"} + with pytest.raises(Exception) as ex: + self.module.ctrl_key(f_module, redfish_str_controller_conn) + assert ex.value.args[0] == "No changes found to be applied." 
+        f_module = self.get_module_mock(params=param)
+        f_module.check_mode = True
+        param.update({"command": "ReKey"})
+        with pytest.raises(Exception) as ex:
+            self.module.ctrl_key(f_module, redfish_str_controller_conn)
+        assert ex.value.args[0] == "Changes found to be applied."
+        param.update({"command": "RemoveControllerKey"})
+        f_module = self.get_module_mock(params=param)
+        f_module.check_mode = True
+        with pytest.raises(Exception) as ex:
+            self.module.ctrl_key(f_module, redfish_str_controller_conn)
+        assert ex.value.args[0] == "Changes found to be applied."
+        redfish_response_mock.json_data = {"SecurityStatus": "EncryptionCapable", "KeyID": None}
+        with pytest.raises(Exception) as ex:
+            self.module.ctrl_key(f_module, redfish_str_controller_conn)
+        assert ex.value.args[0] == "No changes found to be applied."
+        param.update({"command": "EnableControllerEncryption"})
+        f_module = self.get_module_mock(params=param)
+        f_module.check_mode = True
+        with pytest.raises(Exception) as ex:
+            self.module.ctrl_key(f_module, redfish_str_controller_conn)
+        assert ex.value.args[0] == "Changes found to be applied."
+        redfish_response_mock.json_data = {"SecurityStatus": "SecurityKeyAssigned", "KeyID": None}
+        with pytest.raises(Exception) as ex:
+            self.module.ctrl_key(f_module, redfish_str_controller_conn)
+        assert ex.value.args[0] == "No changes found to be applied."
+        f_module.check_mode = False
+        redfish_response_mock.json_data = {"SecurityStatus": "EncryptionCapable", "KeyID": None}
+        redfish_response_mock.headers = {"Location": "/redfish/v1/Managers/iDRAC.Embedded.1/Jobs/JID_XXXXXXXXXXXXX"}
+        result = self.module.ctrl_key(f_module, redfish_str_controller_conn)
+        assert result[2] == "JID_XXXXXXXXXXXXX"

-    def test_check_volume_array_exists_HttpError_exceptions(self, redfish_response_mock, redfish_default_args,
-                                                             idrac_connection_mock_for_redfish_storage_controller):
-        redfish_default_args.update({"volume_id": ["v1"]})
-        redfish_response_mock.json_data = {"volume_id": ["v1"]}
-        f_module = self.get_module_mock(params=redfish_default_args)
-        json_str = to_text(json.dumps({"data": "out"}))
-        idrac_connection_mock_for_redfish_storage_controller.invoke_request.side_effect = HTTPError(
-            'http://testhost.com', 400, 'http error message',
-            {"accept-type": "application/json"}, StringIO(json_str))
-        with pytest.raises(Exception) as exc:
-            self.module.check_volume_array_exists(f_module, idrac_connection_mock_for_redfish_storage_controller)
-        assert exc.value.args[0] == "Unable to locate the virtual disk with the ID: v1"
+    def test_convert_raid_status(self, redfish_str_controller_conn, redfish_response_mock):
+        param = {"baseuri": "192.168.0.1", "username": "username", "password": "password",
+                 "command": "ConvertToRAID", "target": ["Disk.Bay.0:Enclosure.Internal.0-1:RAID.Slot.1-1",
+                                                        "Disk.Bay.1:Enclosure.Internal.0-1:RAID.Slot.1-1"]}
+        f_module = self.get_module_mock(params=param)
+        redfish_response_mock.json_data = {"Oem": {"Dell": {"DellPhysicalDisk": {"RaidStatus": "NonRAID"}}}}
+        redfish_response_mock.headers = {"Location": "/redfish/v1/Managers/iDRAC.Embedded.1/Jobs/JID_XXXXXXXXXXXXX"}
+        result = self.module.convert_raid_status(f_module, redfish_str_controller_conn)
+        assert result[2] == "JID_XXXXXXXXXXXXX"
+        f_module.check_mode = True
+        with pytest.raises(Exception) as ex:
+            self.module.convert_raid_status(f_module, redfish_str_controller_conn)
+        assert ex.value.args[0] == "Changes found to be applied."
+        f_module.check_mode = False
+        redfish_response_mock.json_data = {"Oem": {"Dell": {"DellPhysicalDisk": {"RaidStatus": "Ready"}}}}
+        with pytest.raises(Exception) as ex:
+            self.module.convert_raid_status(f_module, redfish_str_controller_conn)
+        assert ex.value.args[0] == "No changes found to be applied."

-    def test_check_volume_array_exists_exceptions(self, redfish_response_mock, redfish_default_args,
-                                                  idrac_connection_mock_for_redfish_storage_controller):
-        redfish_default_args.update({"volume_id": ["v1"]})
-        redfish_response_mock.json_data = {"volume_id": ["v1"]}
-        f_module = self.get_module_mock(params=redfish_default_args)
-        idrac_connection_mock_for_redfish_storage_controller.invoke_request.side_effect = urllib_error.URLError('TESTS')
-        with pytest.raises(Exception) as exc:
-            self.module.check_volume_array_exists(f_module, idrac_connection_mock_for_redfish_storage_controller)
-        assert "TESTS" in exc.value.args[0]
+    def test_change_pd_status(self, redfish_str_controller_conn, redfish_response_mock):
+        param = {"baseuri": "192.168.0.1", "username": "username", "password": "password",
+                 "command": "ChangePDStateToOnline",
+                 "target": ["Disk.Bay.0:Enclosure.Internal.0-1:RAID.Slot.1-1",
+                            "Disk.Bay.1:Enclosure.Internal.0-1:RAID.Slot.1-1"]}
+        f_module = self.get_module_mock(params=param)
+        redfish_response_mock.json_data = {"Oem": {"Dell": {"DellPhysicalDisk": {"RaidStatus": "NonRAID"}}}}
+        redfish_response_mock.headers = {"Location": "/redfish/v1/Managers/iDRAC.Embedded.1/Jobs/JID_XXXXXXXXXXXXX"}
+        result = self.module.change_pd_status(f_module, redfish_str_controller_conn)
+        assert result[2] == "JID_XXXXXXXXXXXXX"
+        f_module.check_mode = True
+        with pytest.raises(Exception) as ex:
+            self.module.change_pd_status(f_module, redfish_str_controller_conn)
+        assert ex.value.args[0] == "Changes found to be applied."
+        f_module.check_mode = False
+        redfish_response_mock.json_data = {"Oem": {"Dell": {"DellPhysicalDisk": {"RaidStatus": "Online"}}}}
+        with pytest.raises(Exception) as ex:
+            self.module.change_pd_status(f_module, redfish_str_controller_conn)
+        assert ex.value.args[0] == "No changes found to be applied."
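+    # The ConvertToRAID and ChangePDStateToOnline tests above key off the Dell OEM
+    # RaidStatus value ("NonRAID", "Ready", "Online") reported for each physical disk target.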
-    @pytest.mark.parametrize("input", [{"item": "x1"}])
-    def test_check_id_exists(self,
-                             idrac_connection_mock_for_redfish_storage_controller,
-                             redfish_response_mock, input):
-        f_module = self.get_module_mock(params=input)
-        msg = "item with id x1 not found in system"
+    @pytest.mark.parametrize("exc_type", [RuntimeError, URLError, SSLValidationError, ConnectionError, KeyError,
+                                          ImportError, ValueError, TypeError])
+    def test_main_error(self, redfish_str_controller_conn, redfish_response_mock, mocker,
+                        exc_type, redfish_default_args):
+        param = {"baseuri": "192.168.0.1", "username": "username", "password": "password",
+                 "command": "ResetConfig", "controller_id": "RAID.Integrated.1-1"}
+        redfish_default_args.update(param)
+        mocker.patch(MODULE_PATH + 'idrac_redfish_storage_controller.validate_inputs', return_value=None)
         redfish_response_mock.success = False
-        with pytest.raises(Exception) as exc:
-            self.module.check_id_exists(f_module,
-                                        idrac_connection_mock_for_redfish_storage_controller,
-                                        "item", "uri")
-        assert exc.value.args[0] == msg
-
-    def test_check_id_exists_exceptions(self, idrac_connection_mock_for_redfish_storage_controller):
-        f_module = self.get_module_mock()
-        idrac_connection_mock_for_redfish_storage_controller.invoke_request.side_effect = urllib_error.URLError('TESTS')
-        with pytest.raises(Exception) as exc:
-            self.module.check_id_exists(f_module,
-                                        idrac_connection_mock_for_redfish_storage_controller,
-                                        "item", "uri")
-        assert "TESTS" in exc.value.args[0]
-
-    def test_check_id_exists_HttpError_exceptions(self, idrac_connection_mock_for_redfish_storage_controller,
-                                                  redfish_default_args):
-        f_module = self.get_module_mock(params=redfish_default_args)
-        f_module = self.get_module_mock(params=redfish_default_args)
+        redfish_response_mock.status_code = 400
         json_str = to_text(json.dumps({"data": "out"}))
-        idrac_connection_mock_for_redfish_storage_controller.invoke_request.side_effect = HTTPError(
-            'http://testhost.com', 400, 'http error message',
-            {"accept-type": "application/json"}, StringIO(json_str))
-        with pytest.raises(Exception) as exc:
-            self.module.check_id_exists(f_module,
-                                        idrac_connection_mock_for_redfish_storage_controller,
-                                        "item", "uri")
-        assert exc.value.args[0] == "item with id None not found in system"
-
-    arg_list1 = [{"command": "ResetConfig", "controller_id": "c1"},
-                 {"command": "RemoveControllerKey", "controller_id": "c1"},
-                 {"command": "ReKey", "controller_id": "c1"},
-                 {"command": "SetControllerKey", "controller_id": "c1", "key": "key", "key_id": "key_id"},
-                 {"command": "AssignSpare", "volume_id": ["v1"], "target": "target"}]
+        if exc_type == URLError:
+            mocker.patch(MODULE_PATH + 'idrac_redfish_storage_controller.ctrl_reset_config',
+                         side_effect=exc_type("url open error"))
+            result = self._run_module(redfish_default_args)
+            assert result["unreachable"] is True
+        elif exc_type not in [HTTPError, SSLValidationError]:
+            mocker.patch(MODULE_PATH + 'idrac_redfish_storage_controller.ctrl_reset_config',
+                         side_effect=exc_type('exception message'))
+            result = self._run_module_with_fail_json(redfish_default_args)
+            assert result['failed'] is True
+        else:
+            mocker.patch(MODULE_PATH + 'idrac_redfish_storage_controller.ctrl_reset_config',
+                         side_effect=exc_type('http://testhost.com', 400, 'http error message',
+                                              {"accept-type": "application/json"}, StringIO(json_str)))
+            result = self._run_module_with_fail_json(redfish_default_args)
+            assert result['failed'] is True
+        assert 'msg' in result
-
-    @pytest.mark.parametrize("param", arg_list1)
-    def test_idrac_redfish_storage_controller_main_success_case_01(self,
-                                                                   mocker,
-                                                                   redfish_default_args,
-                                                                   redfish_response_mock,
-                                                                   idrac_connection_mock_for_redfish_storage_controller,
-                                                                   param):
-        mocker.patch(
-            MODULE_PATH + 'idrac_redfish_storage_controller.validate_inputs')
-        mocker.patch(
-            MODULE_PATH + 'idrac_redfish_storage_controller.check_raid_service')
-        mocker.patch(
-            MODULE_PATH + 'idrac_redfish_storage_controller.check_id_exists')
-        mocker.patch(
-            MODULE_PATH + 'idrac_redfish_storage_controller.check_volume_array_exists')
-        mocker.patch(
-            MODULE_PATH + 'idrac_redfish_storage_controller.check_encryption_capability')
-        f_module = self.get_module_mock(params=param)
-        redfish_response_mock.success = True
-        redfish_response_mock.headers = {"Location": "Jobs/1234"}
+    def test_main_success(self, redfish_str_controller_conn, redfish_response_mock, redfish_default_args, mocker):
+        param = {"baseuri": "192.168.0.1", "username": "username", "password": "password",
+                 "command": "SetControllerKey", "key": "Key@123", "key_id": "keyid@123",
+                 "controller_id": "RAID.Integrated.1-1",
+                 "target": ["Disk.Bay.0:Enclosure.Internal.0-1:RAID.Slot.1-1"]}
         redfish_default_args.update(param)
+        mocker.patch(MODULE_PATH + 'idrac_redfish_storage_controller.validate_inputs', return_value=None)
+        mocker.patch(MODULE_PATH + 'idrac_redfish_storage_controller.ctrl_key',
+                     return_value=("", "", "JID_XXXXXXXXXXXXX"))
         result = self._run_module(redfish_default_args)
-        assert result["changed"] is True
-        assert result['msg'] == "Successfully submitted the job that performs the {0} operation".format(
-            param["command"])
-        assert result["task"]["id"] == "1234"
-        assert result["task"]["uri"] == "Jobs/1234"
-
-    arg_list1 = [{"command": "ResetConfig", "controller_id": "c1"},
-                 {"command": "RemoveControllerKey", "controller_id": "c1"},
-                 {"command": "ReKey", "controller_id": "c1"},
-                 {"command": "SetControllerKey", "controller_id": "c1", "key": "key", "key_id": "key_id"},
-                 {"command": "AssignSpare", "target": "target"}]
-
-    @pytest.mark.parametrize("param", arg_list1)
-    def test_idrac_redfish_storage_controller_main_success_case_02(self,
-                                                                   mocker,
-                                                                   redfish_default_args,
-                                                                   redfish_response_mock,
-                                                                   idrac_connection_mock_for_redfish_storage_controller,
-                                                                   param):
-        mocker.patch(
-            MODULE_PATH + 'idrac_redfish_storage_controller.validate_inputs')
-        mocker.patch(
-            MODULE_PATH + 'idrac_redfish_storage_controller.check_raid_service')
-        mocker.patch(
-            MODULE_PATH + 'idrac_redfish_storage_controller.check_id_exists')
-        mocker.patch(
-            MODULE_PATH + 'idrac_redfish_storage_controller.check_volume_array_exists')
-        mocker.patch(
-            MODULE_PATH + 'idrac_redfish_storage_controller.check_encryption_capability')
-        f_module = self.get_module_mock(params=param)
-        redfish_response_mock.success = True
-        redfish_response_mock.headers = {"Location": "Jobs/1234"}
+        assert result["task"]["id"] == "JID_XXXXXXXXXXXXX"
+        param.update({"command": "AssignSpare"})
         redfish_default_args.update(param)
+        mocker.patch(MODULE_PATH + 'idrac_redfish_storage_controller.hot_spare_config',
+                     return_value=("", "", "JID_XXXXXXXXXXXXX"))
         result = self._run_module(redfish_default_args)
-        assert result["changed"] is True
-        assert result['msg'] == "Successfully submitted the job that performs the {0} operation".format(
-            param["command"])
-        assert result["task"]["id"] == "1234"
-        assert result["task"]["uri"] == "Jobs/1234"
-
-    @pytest.mark.parametrize("exc_type",
-                             [RuntimeError, urllib_error.URLError, SSLValidationError, ConnectionError, KeyError,
-                              ImportError,
-                              ValueError, TypeError])
-    def test_idrac_redfish_storage_controller_main_exception_case(self, exc_type, mocker,
-                                                                  redfish_default_args,
-                                                                  redfish_response_mock,
-                                                                  idrac_connection_mock_for_redfish_storage_controller):
-        mocker.patch(
-            MODULE_PATH + 'idrac_redfish_storage_controller.check_encryption_capability',
-            side_effect=exc_type('test'))
-        mocker.patch(
-            MODULE_PATH + 'idrac_redfish_storage_controller.check_raid_service',
-            side_effect=exc_type('test'))
-        mocker.patch(
-            MODULE_PATH + 'idrac_redfish_storage_controller.check_id_exists',
-            side_effect=exc_type('test'))
-        mocker.patch(
-            MODULE_PATH + 'idrac_redfish_storage_controller.check_volume_array_exists',
-            side_effect=exc_type('test'))
-        mocker.patch(
-            MODULE_PATH + 'idrac_redfish_storage_controller.validate_inputs',
-            side_effect=exc_type('test'))
-        result = self._run_module_with_fail_json(redfish_default_args)
-        assert 'power_state' not in result
-        assert 'msg' in result
-        assert result['failed'] is True
-
-    arg_list1 = [{"command": "ResetConfig", "controller_id": "c1"},
-                 {"command": "RemoveControllerKey", "controller_id": "c1"},
-                 {"command": "ReKey", "controller_id": "c1"},
-                 {"command": "SetControllerKey", "controller_id": "c1", "key": "key", "key_id": "key_id"},
-                 {"command": "AssignSpare", "target": "target"}]
-
-    @pytest.mark.parametrize("param", arg_list1)
-    def test_idrac_redfish_main_HTTPError_case(self, param, idrac_connection_mock_for_redfish_storage_controller,
-                                               redfish_default_args, mocker):
+        assert result["task"]["id"] == "JID_XXXXXXXXXXXXX"
+        param.update({"command": "BlinkTarget"})
         redfish_default_args.update(param)
-        json_str = to_text(json.dumps({"data": "out"}))
-        mocker.patch(
-            MODULE_PATH + 'idrac_redfish_storage_controller.check_raid_service',
-            side_effect=HTTPError('http://testhost.com', 400, 'http error message',
-                                  {"accept-type": "application/json"}, StringIO(json_str)))
-        result = self._run_module_with_fail_json(redfish_default_args)
-        assert 'msg' in result
-        assert result['failed'] is True
+        redfish_response_mock.status_code = 200
+        mocker.patch(MODULE_PATH + 'idrac_redfish_storage_controller.target_identify_pattern',
+                     return_value=redfish_response_mock)
+        result = self._run_module(redfish_default_args)
+        assert result["msg"] == "Successfully performed the 'BlinkTarget' operation."
+        param.update({"command": "ConvertToRAID"})
+        redfish_default_args.update(param)
+        mocker.patch(MODULE_PATH + 'idrac_redfish_storage_controller.convert_raid_status',
+                     return_value=("", "", "JID_XXXXXXXXXXXXX"))
+        result = self._run_module(redfish_default_args)
+        assert result["task"]["id"] == "JID_XXXXXXXXXXXXX"
+        param.update({"command": "ChangePDStateToOnline", "job_wait": True})
+        redfish_default_args.update(param)
+        mocker.patch(MODULE_PATH + 'idrac_redfish_storage_controller.change_pd_status',
+                     return_value=("", "", "JID_XXXXXXXXXXXXX"))
+        mocker.patch(MODULE_PATH + 'idrac_redfish_storage_controller.wait_for_job_completion',
+                     return_value=(redfish_response_mock, ""))
+        mocker.patch(MODULE_PATH + 'idrac_redfish_storage_controller.strip_substr_dict',
+                     return_value={"JobState": "Failed"})
+        result = self._run_module(redfish_default_args)
+        assert result["task"]["id"] == "JID_XXXXXXXXXXXXX"
diff --git a/tests/unit/plugins/modules/test_idrac_reset.py b/tests/unit/plugins/modules/test_idrac_reset.py
index cc5e5ce66..3f4ca4977 100644
--- a/tests/unit/plugins/modules/test_idrac_reset.py
+++ b/tests/unit/plugins/modules/test_idrac_reset.py
@@ -2,8 +2,8 @@

 #
 # Dell EMC OpenManage Ansible Modules
-# Version 3.0.0
-# Copyright (C) 2020-2021 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 5.2.0
+# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
 # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
 #

@@ -17,7 +17,7 @@
 from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
 from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
 from ansible.module_utils.urls import ConnectionError, SSLValidationError
-from ansible_collections.dellemc.openmanage.tests.unit.compat.mock import MagicMock, patch, Mock
+from mock import MagicMock, patch, Mock
 from io import StringIO
 from ansible.module_utils._text import to_text

diff --git a/tests/unit/plugins/modules/test_idrac_server_config_profile.py b/tests/unit/plugins/modules/test_idrac_server_config_profile.py
index 060f73069..2c43834e7 100644
--- a/tests/unit/plugins/modules/test_idrac_server_config_profile.py
+++ b/tests/unit/plugins/modules/test_idrac_server_config_profile.py
@@ -2,8 +2,8 @@

 #
 # Dell EMC OpenManage Ansible Modules
-# Version 3.5.0
-# Copyright (C) 2020-2021 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 5.2.0
+# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
 # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
 #

@@ -15,7 +15,7 @@
 from ansible_collections.dellemc.openmanage.plugins.modules import idrac_server_config_profile
 from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants,\
     AnsibleExitJson
-from ansible_collections.dellemc.openmanage.tests.unit.compat.mock import MagicMock, patch, Mock
+from mock import MagicMock, patch, Mock
 from pytest import importorskip
 from ansible.module_utils.six.moves.urllib.parse import urlparse, ParseResult
 MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.'
diff --git a/tests/unit/plugins/modules/test_idrac_syslog.py b/tests/unit/plugins/modules/test_idrac_syslog.py
index e57093e43..ae89c2808 100644
--- a/tests/unit/plugins/modules/test_idrac_syslog.py
+++ b/tests/unit/plugins/modules/test_idrac_syslog.py
@@ -2,8 +2,8 @@

 #
 # Dell EMC OpenManage Ansible Modules
-# Version 3.0.0
-# Copyright (C) 2018-2021 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 5.2.0
+# Copyright (C) 2018-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
 # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
 #

@@ -18,7 +18,7 @@
 from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
 from ansible.module_utils.urls import ConnectionError, SSLValidationError
 from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
-from ansible_collections.dellemc.openmanage.tests.unit.compat.mock import MagicMock, patch, Mock
+from mock import MagicMock, patch, Mock
 from io import StringIO
 from ansible.module_utils._text import to_text
 from pytest import importorskip
diff --git a/tests/unit/plugins/modules/test_idrac_system_info.py b/tests/unit/plugins/modules/test_idrac_system_info.py
index da8acaaed..dbbb130e9 100644
--- a/tests/unit/plugins/modules/test_idrac_system_info.py
+++ b/tests/unit/plugins/modules/test_idrac_system_info.py
@@ -2,8 +2,8 @@

 #
 # Dell EMC OpenManage Ansible Modules
-# Version 3.0.0
-# Copyright (C) 2021 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 5.2.0
+# Copyright (C) 2021-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
 # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
 #

@@ -16,7 +16,7 @@
 import json
 from ansible_collections.dellemc.openmanage.plugins.modules import idrac_system_info
 from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
-from ansible_collections.dellemc.openmanage.tests.unit.compat.mock import MagicMock, Mock
+from mock import MagicMock, Mock
 from pytest import importorskip
 from ansible.module_utils.urls import ConnectionError, SSLValidationError
 from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
diff --git a/tests/unit/plugins/modules/test_idrac_timezone_ntp.py b/tests/unit/plugins/modules/test_idrac_timezone_ntp.py
index 5c06889a9..08ed69ca3 100644
--- a/tests/unit/plugins/modules/test_idrac_timezone_ntp.py
+++ b/tests/unit/plugins/modules/test_idrac_timezone_ntp.py
@@ -2,8 +2,8 @@

 #
 # Dell EMC OpenManage Ansible Modules
-# Version 3.0.0
-# Copyright (C) 2020-2021 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 5.2.0
+# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
 # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
 #

@@ -15,7 +15,7 @@
 import json
 from ansible_collections.dellemc.openmanage.plugins.modules import idrac_timezone_ntp
 from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
-from ansible_collections.dellemc.openmanage.tests.unit.compat.mock import MagicMock, patch, Mock, PropertyMock
+from mock import MagicMock, patch, Mock, PropertyMock
 from io import StringIO
 from ansible.module_utils._text import to_text
 from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
diff --git a/tests/unit/plugins/modules/test_idrac_user.py b/tests/unit/plugins/modules/test_idrac_user.py
index e959f182f..2fa528d0d 100644
--- a/tests/unit/plugins/modules/test_idrac_user.py
+++ b/tests/unit/plugins/modules/test_idrac_user.py
@@ -2,8 +2,8 @@

 #
 # Dell EMC OpenManage Ansible Modules
-# Version 2.1.2
-# Copyright (C) 2020 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 5.2.0
+# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
 # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
 #

@@ -17,7 +17,7 @@
 from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
 from ansible.module_utils.urls import ConnectionError, SSLValidationError
 from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
-from ansible_collections.dellemc.openmanage.tests.unit.compat.mock import MagicMock, patch, Mock
+from mock import MagicMock, patch, Mock
 from ansible.module_utils._text import to_text
 from io import StringIO

diff --git a/tests/unit/plugins/modules/test_ome_application_console_preferences.py b/tests/unit/plugins/modules/test_ome_application_console_preferences.py
new file mode 100644
index 000000000..3a86a3f0d
--- /dev/null
+++ b/tests/unit/plugins/modules/test_ome_application_console_preferences.py
@@ -0,0 +1,2240 @@
+# -*- coding: utf-8 -*-

+#
+# Dell EMC OpenManage Ansible Modules
+# Version 5.2.0
+# Copyright (C) 2022 Dell Inc. or its subsidiaries. All Rights Reserved.

+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#

+from __future__ import (absolute_import, division, print_function)

+__metaclass__ = type

+import json
+from io import StringIO

+import pytest
+from ansible.module_utils._text import to_text
+from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
+from ssl import SSLError
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from ansible_collections.dellemc.openmanage.plugins.modules import ome_application_console_preferences
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants, \
+    AnsibleFailJSonException

+SUCCESS_MSG = "Successfully updated the Console Preferences settings."
+SETTINGS_URL = "ApplicationService/Settings"
+NO_CHANGES = "No changes found to be applied."
+CHANGES_FOUND = "Changes found to be applied."
+HEALTH_CHECK_UNIT_REQUIRED = "The health check unit is required when health check interval is specified."
+HEALTH_CHECK_INTERVAL_REQUIRED = "The health check interval is required when health check unit is specified."
+HEALTH_CHECK_INTERVAL_INVALID = "The health check interval specified is invalid for the {0}" +JOB_URL = "JobService/Jobs" +CIFS_URL = "ApplicationService/Actions/ApplicationService.UpdateShareTypeSettings" +CONSOLE_SETTINGS_VALUES = ["DATA_PURGE_INTERVAL", "EMAIL_SENDER", "TRAP_FORWARDING_SETTING", + "MX7000_ONBOARDING_PREF", "REPORTS_MAX_RESULTS_LIMIT", + "DISCOVERY_APPROVAL_POLICY", "NODE_INITIATED_DISCOVERY_SET_TRAP_DESTINATION", + "DEVICE_PREFERRED_NAME", "INVALID_DEVICE_HOSTNAME", "COMMON_MAC_ADDRESSES", + "CONSOLE_CONNECTION_SETTING", "MIN_PROTOCOL_VERSION", "SHARE_TYPE"] +MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.ome_application_console_preferences.' +MODULE_UTIL_PATH = 'ansible_collections.dellemc.openmanage.plugins.module_utils.ome.' + + +@pytest.fixture +def ome_connection_mock_for_application_console_preferences(mocker, ome_response_mock): + connection_class_mock = mocker.patch(MODULE_PATH + 'RestOME') + ome_connection_mock_obj = connection_class_mock.return_value.__enter__.return_value + ome_connection_mock_obj.invoke_request.return_value = ome_response_mock + return ome_connection_mock_obj + + +class TestOmeAppConsolePreferences(FakeAnsibleModule): + module = ome_application_console_preferences + + @pytest.mark.parametrize("params", [{"module_args": {"report_row_limit": 123, + "mx7000_onboarding_preferences": "all", + "email_sender_settings": "admin@dell.com", + "trap_forwarding_format": "Normalized", + "metrics_collection_settings": 361}, + "json_data": {"value": [ + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "DATA_PURGE_INTERVAL", + "DefaultValue": "365", + "Value": "361", + "DataType": "java.lang.Integer", + "GroupName": "" + }, + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "TRAP_FORWARDING_SETTING", + "DefaultValue": "AsIs", + "Value": "Normalized", + "DataType": "java.lang.String", + "GroupName": "" + }, + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "MX7000_ONBOARDING_PREF", + "DefaultValue": "all", + "Value": "all", + "DataType": "java.lang.String", + "GroupName": "" + }, + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "REPORTS_MAX_RESULTS_LIMIT", + "DefaultValue": "0", + "Value": "123", + "DataType": "java.lang.Integer", + "GroupName": "" + }, + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "EMAIL_SENDER", + "DefaultValue": "omcadmin@dell.com", + "Value": "admin@dell.com", + "DataType": "java.lang.String", + "GroupName": "" + }, + ]}, + }]) + def test_fetch_cp_settings(self, params, ome_connection_mock_for_application_console_preferences, + ome_response_mock): + ome_response_mock.success = True + f_module = self.get_module_mock(params=params['module_args']) + ome_response_mock.json_data = params["json_data"] + ret_data = self.module.fetch_cp_settings(ome_connection_mock_for_application_console_preferences) + assert ret_data == params["json_data"]["value"] + + @pytest.mark.parametrize("params", [{"module_args": {"device_health": {"health_check_interval": 55, + "health_check_interval_unit": "Minutes"}}, + "json_data": {"@odata.type": "#JobService.Job", + "@odata.id": "/api/JobService/Jobs(10093)", + "Id": 10093, + "JobName": "Global Health Task", + "JobDescription": "Global Health Task", + "NextRun": "2022-03-15 05:25:00.0", + "LastRun": "2022-03-15 05:24:00.043", + "StartTime": None, + "EndTime": None, + "Schedule": "0 0/1 * 1/1 * ? 
*", + "State": "Enabled", + "CreatedBy": "admin", + "UpdatedBy": None, + "Visible": None, + "Editable": None, + "Builtin": False, + "UserGenerated": True, + "Targets": [{"JobId": 10093, "Id": 500, "Data": "", "TargetType": {"Id": 6000, "Name": "GROUP"}}], + "Params": [{"JobId": 10093, "Key": "metricType", "Value": "40, 50"}], + "LastRunStatus": {"@odata.type": "#JobService.JobStatus", "Id": 2060, "Name": "Completed"}, + "JobType": {"@odata.type": "#JobService.JobType", "Id": 6, "Name": "Health_Task", "Internal": False}, + "JobStatus": {"@odata.type": "#JobService.JobStatus", "Id": 2020, "Name": "Scheduled"}, + "ExecutionHistories@odata.navigationLink": "/api/JobService/Jobs(10093)/ExecutionHistories", + "LastExecutionDetail": {"@odata.id": "/api/JobService/Jobs(10093)/LastExecutionDetail"}}, + }]) + def test_job_details(self, params, ome_connection_mock_for_application_console_preferences, + ome_response_mock): + ome_response_mock.success = True + f_module = self.get_module_mock(params=params['module_args']) + ome_response_mock.json_data = {"value": [params["json_data"]]} + ret_data = self.module.job_details(ome_connection_mock_for_application_console_preferences) + assert ret_data == params["json_data"] + + @pytest.mark.parametrize("params", + [ + {"module_args": + { + "report_row_limit": 123, + "mx7000_onboarding_preferences": "all", + "email_sender_settings": "admin@dell.com", + "trap_forwarding_format": "Normalized", + "metrics_collection_settings": 361 + }, + "payload": + {"ConsoleSetting": + [ + { + "Name": "DATA_PURGE_INTERVAL", + "DefaultValue": "365", + "Value": "361", + "DataType": "java.lang.Integer", + "GroupName": "" + }, + { + "Name": "TRAP_FORWARDING_SETTING", + "DefaultValue": "AsIs", + "Value": "AsIs", + "DataType": "java.lang.String", + "GroupName": "" + }, + { + "Name": "DEVICE_PREFERRED_NAME", + "DefaultValue": "SLOT_NAME", + "Value": "PREFER_DNS,PREFER_IDRAC_SYSTEM_HOSTNAME", + "DataType": "java.lang.String", + "GroupName": "DISCOVERY_SETTING" + } + ]}, + "curr_payload": {"DATA_PURGE_INTERVAL": {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "DATA_PURGE_INTERVAL", + "DefaultValue": "365", + "Value": "361", + "DataType": "java.lang.Integer", + "GroupName": ""}, + "TRAP_FORWARDING_SETTING": + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "TRAP_FORWARDING_SETTING", + "DefaultValue": "AsIs", + "Value": "Normalized", + "DataType": "java.lang.String", + "GroupName": ""}, + "MX7000_ONBOARDING_PREF": + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "MX7000_ONBOARDING_PREF", + "DefaultValue": "all", + "Value": "all", + "DataType": "java.lang.String", + "GroupName": ""}, + "REPORTS_MAX_RESULTS_LIMIT": + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "REPORTS_MAX_RESULTS_LIMIT", + "DefaultValue": "0", + "Value": "123", + "DataType": "java.lang.Integer", + "GroupName": ""}, + "EMAIL_SENDER": + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "EMAIL_SENDER", + "DefaultValue": "omcadmin@dell.com", + "Value": "admin@dell.com", + "DataType": "java.lang.String", + "GroupName": ""}, + "DISCOVERY_APPROVAL_POLICY": + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "DISCOVERY_APPROVAL_POLICY", + "DefaultValue": "Automatic", + "Value": "Automatic", + "DataType": "java.lang.String", + "GroupName": ""}, + "NODE_INITIATED_DISCOVERY_SET_TRAP_DESTINATION": + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "NODE_INITIATED_DISCOVERY_SET_TRAP_DESTINATION", + "DefaultValue": "false", + 
"Value": "true", + "DataType": "java.lang.Boolean", + "GroupName": ""}, + "DEVICE_PREFERRED_NAME": + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "DEVICE_PREFERRED_NAME", + "DefaultValue": "HOST_NAME", + "Value": "PREFER_DNS,PREFER_IDRAC_HOSTNAME", + "DataType": "java.lang.String", + "GroupName": "DISCOVERY_SETTING"}, + "INVALID_DEVICE_HOSTNAME": + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "INVALID_DEVICE_HOSTNAME", + "DefaultValue": "", + "Value": "localhost", + "DataType": "java.lang.String", + "GroupName": "DISCOVERY_SETTING"}, + "COMMON_MAC_ADDRESSES": + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "COMMON_MAC_ADDRESSES", + "DefaultValue": "", + "Value": "::", + "DataType": "java.lang.String", + "GroupName": "DISCOVERY_SETTING"}, + "MIN_PROTOCOL_VERSION": + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "MIN_PROTOCOL_VERSION", + "DefaultValue": "V2", + "Value": "V2", + "DataType": "java.lang.String", + "GroupName": "CIFS_PROTOCOL_SETTINGS"}, + "CONSOLE_CONNECTION_SETTING": + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "CONSOLE_CONNECTION_SETTING", + "DefaultValue": "last_known", + "Value": "last_known", + "DataType": "java.lang.String", + "GroupName": "CONSOLE_CONNECTION_SETTING"}, + "SHARE_TYPE": + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "SHARE_TYPE", + "DefaultValue": "CIFS", + "Value": "CIFS", + "DataType": "java.lang.String", + "GroupName": "BUILT_IN_APPLIANCE_SHARE_SETTINGS"}}, + "json_data": {"value": [ + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "DATA_PURGE_INTERVAL", + "DefaultValue": "365", + "Value": "361", + "DataType": "java.lang.Integer", + "GroupName": ""}, + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "TRAP_FORWARDING_SETTING", + "DefaultValue": "AsIs", + "Value": "Normalized", + "DataType": "java.lang.String", + "GroupName": ""}, + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "MX7000_ONBOARDING_PREF", + "DefaultValue": "all", + "Value": "all", + "DataType": "java.lang.String", + "GroupName": ""}, + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "REPORTS_MAX_RESULTS_LIMIT", + "DefaultValue": "0", + "Value": "123", + "DataType": "java.lang.Integer", + "GroupName": ""}, + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "EMAIL_SENDER", + "DefaultValue": "omcadmin@dell.com", + "Value": "admin@dell.com", + "DataType": "java.lang.String", + "GroupName": ""}, + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "DISCOVERY_APPROVAL_POLICY", + "DefaultValue": "Automatic", + "Value": "Automatic", + "DataType": "java.lang.String", + "GroupName": ""}, + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "NODE_INITIATED_DISCOVERY_SET_TRAP_DESTINATION", + "DefaultValue": "false", + "Value": "true", + "DataType": "java.lang.Boolean", + "GroupName": ""}, + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "DEVICE_PREFERRED_NAME", + "DefaultValue": "HOST_NAME", + "Value": "PREFER_DNS,PREFER_IDRAC_HOSTNAME", + "DataType": "java.lang.String", + "GroupName": "DISCOVERY_SETTING"}, + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "INVALID_DEVICE_HOSTNAME", + "DefaultValue": "", + "Value": "localhost", + "DataType": "java.lang.String", + "GroupName": "DISCOVERY_SETTING"}, + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "COMMON_MAC_ADDRESSES", + "DefaultValue": "", + "Value": "::", + "DataType": 
"java.lang.String", + "GroupName": "DISCOVERY_SETTING"}, + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "MIN_PROTOCOL_VERSION", + "DefaultValue": "V2", + "Value": "V2", + "DataType": "java.lang.String", + "GroupName": "CIFS_PROTOCOL_SETTINGS"}, + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "CONSOLE_CONNECTION_SETTING", + "DefaultValue": "last_known", + "Value": "last_known", + "DataType": "java.lang.String", + "GroupName": "CONSOLE_CONNECTION_SETTING"}, + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "SHARE_TYPE", + "DefaultValue": "CIFS", + "Value": "CIFS", + "DataType": "java.lang.String", + "GroupName": "BUILT_IN_APPLIANCE_SHARE_SETTINGS"}]}, } + ]) + def test_create_payload_success(self, params, ome_connection_mock_for_application_console_preferences, + ome_response_mock, ome_default_args, mocker): + ome_response_mock.success = True + ome_response_mock.json_data = {"value": [params["payload"]]} + f_module = self.get_module_mock(params=params['module_args']) + curr_payload = params["json_data"]["value"] + ret_payload, payload_dict = self.module.create_payload(ome_connection_mock_for_application_console_preferences, + curr_payload) + assert payload_dict == params["curr_payload"] + + @pytest.mark.parametrize("params", + [ + {"module_args": + { + "metrics_collection_settings": "361" + }, + "payload": + {"ConsoleSetting": + [ + { + "@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "DATA_PURGE_INTERVAL", + "DefaultValue": "365", + "Value": "361", + "DataType": "java.lang.Integer", + "GroupName": "" + } + ]}, + "curr_payload": + {"DATA_PURGE_INTERVAL": {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "DATA_PURGE_INTERVAL", + "DefaultValue": "365", + "Value": "361", + "DataType": "java.lang.Integer", + "GroupName": ""}, + "TRAP_FORWARDING_SETTING": {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "TRAP_FORWARDING_SETTING", + "DefaultValue": "AsIs", + "Value": "Normalized", + "DataType": "java.lang.String", + "GroupName": ""}, + "MX7000_ONBOARDING_PREF": {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "MX7000_ONBOARDING_PREF", + "DefaultValue": "all", + "Value": "all", + "DataType": "java.lang.String", + "GroupName": ""}, + "REPORTS_MAX_RESULTS_LIMIT": {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "REPORTS_MAX_RESULTS_LIMIT", + "DefaultValue": "0", + "Value": "123", + "DataType": "java.lang.Integer", + "GroupName": ""}, + "EMAIL_SENDER": {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "EMAIL_SENDER", + "DefaultValue": "omcadmin@dell.com", + "Value": "admin@dell.com", + "DataType": "java.lang.String", + "GroupName": ""}, + "DISCOVERY_APPROVAL_POLICY": {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "DISCOVERY_APPROVAL_POLICY", + "DefaultValue": "Automatic", + "Value": "Automatic", + "DataType": "java.lang.String", + "GroupName": ""}, + "NODE_INITIATED_DISCOVERY_SET_TRAP_DESTINATION": {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "NODE_INITIATED_DISCOVERY_SET_TRAP_DESTINATION", + "DefaultValue": "false", + "Value": "true", + "DataType": "java.lang.Boolean", + "GroupName": ""}, + "DEVICE_PREFERRED_NAME": {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "DEVICE_PREFERRED_NAME", + "DefaultValue": "HOST_NAME", + "Value": "PREFER_DNS,PREFER_IDRAC_HOSTNAME", + "DataType": "java.lang.String", + "GroupName": "DISCOVERY_SETTING"}, + "INVALID_DEVICE_HOSTNAME": {"@odata.type": 
"#ApplicationService.ConsoleSetting", + "Name": "INVALID_DEVICE_HOSTNAME", + "DefaultValue": "", + "Value": "localhost", + "DataType": "java.lang.String", + "GroupName": "DISCOVERY_SETTING"}, + "COMMON_MAC_ADDRESSES": {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "COMMON_MAC_ADDRESSES", + "DefaultValue": "", + "Value": "::", + "DataType": "java.lang.String", + "GroupName": "DISCOVERY_SETTING"}, + "MIN_PROTOCOL_VERSION": {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "MIN_PROTOCOL_VERSION", + "DefaultValue": "V2", + "Value": "V2", + "DataType": "java.lang.String", + "GroupName": "CIFS_PROTOCOL_SETTINGS"}, + "CONSOLE_CONNECTION_SETTING": {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "CONSOLE_CONNECTION_SETTING", + "DefaultValue": "last_known", + "Value": "last_known", + "DataType": "java.lang.String", + "GroupName": "CONSOLE_CONNECTION_SETTING"}, + "SHARE_TYPE": {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "SHARE_TYPE", + "DefaultValue": "CIFS", + "Value": "CIFS", + "DataType": "java.lang.String", + "GroupName": "BUILT_IN_APPLIANCE_SHARE_SETTINGS"}}, + "json_data": {"value": [ + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "DATA_PURGE_INTERVAL", + "DefaultValue": "365", + "Value": "361", + "DataType": "java.lang.Integer", + "GroupName": "" + }, + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "TRAP_FORWARDING_SETTING", + "DefaultValue": "AsIs", + "Value": "Normalized", + "DataType": "java.lang.String", + "GroupName": "" + }, + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "MX7000_ONBOARDING_PREF", + "DefaultValue": "all", + "Value": "all", + "DataType": "java.lang.String", + "GroupName": "" + }, + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "REPORTS_MAX_RESULTS_LIMIT", + "DefaultValue": "0", + "Value": "123", + "DataType": "java.lang.Integer", + "GroupName": "" + }, + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "EMAIL_SENDER", + "DefaultValue": "omcadmin@dell.com", + "Value": "admin@dell.com", + "DataType": "java.lang.String", + "GroupName": "" + }, + { + "@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "DISCOVERY_APPROVAL_POLICY", + "DefaultValue": "Automatic", + "Value": "Automatic", + "DataType": "java.lang.String", + "GroupName": "" + }, + { + "@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "NODE_INITIATED_DISCOVERY_SET_TRAP_DESTINATION", + "DefaultValue": "false", + "Value": "true", + "DataType": "java.lang.Boolean", + "GroupName": "" + }, + { + "@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "DEVICE_PREFERRED_NAME", + "DefaultValue": "HOST_NAME", + "Value": "PREFER_DNS,PREFER_IDRAC_HOSTNAME", + "DataType": "java.lang.String", + "GroupName": "DISCOVERY_SETTING" + }, + { + "@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "INVALID_DEVICE_HOSTNAME", + "DefaultValue": "", + "Value": "localhost", + "DataType": "java.lang.String", + "GroupName": "DISCOVERY_SETTING" + }, + { + "@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "COMMON_MAC_ADDRESSES", + "DefaultValue": "", + "Value": "::", + "DataType": "java.lang.String", + "GroupName": "DISCOVERY_SETTING" + }, + { + "@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "MIN_PROTOCOL_VERSION", + "DefaultValue": "V2", + "Value": "V2", + "DataType": "java.lang.String", + "GroupName": "CIFS_PROTOCOL_SETTINGS" + }, + { + "@odata.type": "#ApplicationService.ConsoleSetting", + "Name": 
"CONSOLE_CONNECTION_SETTING", + "DefaultValue": "last_known", + "Value": "last_known", + "DataType": "java.lang.String", + "GroupName": "CONSOLE_CONNECTION_SETTING" + }, + { + "@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "SHARE_TYPE", + "DefaultValue": "CIFS", + "Value": "CIFS", + "DataType": "java.lang.String", + "GroupName": "BUILT_IN_APPLIANCE_SHARE_SETTINGS"}]}, }]) + def test_create_payload_success_case02(self, params, ome_connection_mock_for_application_console_preferences, + ome_response_mock, ome_default_args, mocker): + ome_response_mock.success = True + # ome_response_mock.json_data = {"value": [params["payload"]]} + f_module = self.get_module_mock(params=params['module_args']) + curr_payload = params["json_data"]["value"] + ret_payload, payload_dict = self.module.create_payload(f_module, curr_payload) + assert ret_payload == params["payload"] + + @pytest.mark.parametrize("params", [{"module_args": {"builtin_appliance_share": {"share_options": "CIFS", + "cifs_options": "V1"}}, + "payload": {"ConsoleSetting": [{"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "MIN_PROTOCOL_VERSION", + "DefaultValue": "V2", + "Value": "V1", + "DataType": "java.lang.String", + "GroupName": "CIFS_PROTOCOL_SETTINGS"}]}, + "curr_payload": {"DATA_PURGE_INTERVAL": {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "DATA_PURGE_INTERVAL", + "DefaultValue": "365", + "Value": "361", + "DataType": "java.lang.Integer", + "GroupName": ""}, + "TRAP_FORWARDING_SETTING": {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "TRAP_FORWARDING_SETTING", + "DefaultValue": "AsIs", + "Value": "Normalized", + "DataType": "java.lang.String", + "GroupName": ""}, + "MX7000_ONBOARDING_PREF": {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "MX7000_ONBOARDING_PREF", + "DefaultValue": "all", + "Value": "all", + "DataType": "java.lang.String", + "GroupName": ""}, + "REPORTS_MAX_RESULTS_LIMIT": {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "REPORTS_MAX_RESULTS_LIMIT", + "DefaultValue": "0", + "Value": "123", + "DataType": "java.lang.Integer", + "GroupName": ""}, + "EMAIL_SENDER": {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "EMAIL_SENDER", + "DefaultValue": "omcadmin@dell.com", + "Value": "admin@dell.com", + "DataType": "java.lang.String", + "GroupName": ""}, + "DISCOVERY_APPROVAL_POLICY": { + "@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "DISCOVERY_APPROVAL_POLICY", + "DefaultValue": "Automatic", + "Value": "Automatic", + "DataType": "java.lang.String", + "GroupName": ""}, + "NODE_INITIATED_DISCOVERY_SET_TRAP_DESTINATION": { + "@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "NODE_INITIATED_DISCOVERY_SET_TRAP_DESTINATION", + "DefaultValue": "false", + "Value": "true", + "DataType": "java.lang.Boolean", + "GroupName": ""}, + "DEVICE_PREFERRED_NAME": { + "@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "DEVICE_PREFERRED_NAME", + "DefaultValue": "HOST_NAME", + "Value": "PREFER_DNS,PREFER_IDRAC_HOSTNAME", + "DataType": "java.lang.String", + "GroupName": "DISCOVERY_SETTING"}, + "INVALID_DEVICE_HOSTNAME": { + "@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "INVALID_DEVICE_HOSTNAME", + "DefaultValue": "", + "Value": "localhost", + "DataType": "java.lang.String", + "GroupName": "DISCOVERY_SETTING"}, + "COMMON_MAC_ADDRESSES": { + "@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "COMMON_MAC_ADDRESSES", + "DefaultValue": "", + "Value": "::", + "DataType": 
"java.lang.String", + "GroupName": "DISCOVERY_SETTING"}, + "MIN_PROTOCOL_VERSION": { + "@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "MIN_PROTOCOL_VERSION", + "DefaultValue": "V2", + "Value": "V2", + "DataType": "java.lang.String", + "GroupName": "CIFS_PROTOCOL_SETTINGS"}, + "CONSOLE_CONNECTION_SETTING": { + "@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "CONSOLE_CONNECTION_SETTING", + "DefaultValue": "last_known", + "Value": "last_known", + "DataType": "java.lang.String", + "GroupName": "CONSOLE_CONNECTION_SETTING"}, + "SHARE_TYPE": { + "@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "SHARE_TYPE", + "DefaultValue": "CIFS", + "Value": "CIFS", + "DataType": "java.lang.String", + "GroupName": "BUILT_IN_APPLIANCE_SHARE_SETTINGS"}}, + "json_data": {"value": [{"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "DATA_PURGE_INTERVAL", + "DefaultValue": "365", + "Value": "361", + "DataType": "java.lang.Integer", + "GroupName": ""}, + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "TRAP_FORWARDING_SETTING", + "DefaultValue": "AsIs", + "Value": "Normalized", + "DataType": "java.lang.String", + "GroupName": ""}, + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "MX7000_ONBOARDING_PREF", + "DefaultValue": "all", + "Value": "all", + "DataType": "java.lang.String", + "GroupName": ""}, + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "REPORTS_MAX_RESULTS_LIMIT", + "DefaultValue": "0", + "Value": "123", + "DataType": "java.lang.Integer", + "GroupName": ""}, + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "EMAIL_SENDER", + "DefaultValue": "omcadmin@dell.com", + "Value": "admin@dell.com", + "DataType": "java.lang.String", + "GroupName": ""}, + { + "@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "DISCOVERY_APPROVAL_POLICY", + "DefaultValue": "Automatic", + "Value": "Automatic", + "DataType": "java.lang.String", + "GroupName": ""}, + { + "@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "NODE_INITIATED_DISCOVERY_SET_TRAP_DESTINATION", + "DefaultValue": "false", + "Value": "true", + "DataType": "java.lang.Boolean", + "GroupName": ""}, + { + "@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "DEVICE_PREFERRED_NAME", + "DefaultValue": "HOST_NAME", + "Value": "PREFER_DNS,PREFER_IDRAC_HOSTNAME", + "DataType": "java.lang.String", + "GroupName": "DISCOVERY_SETTING"}, + { + "@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "INVALID_DEVICE_HOSTNAME", + "DefaultValue": "", + "Value": "localhost", + "DataType": "java.lang.String", + "GroupName": "DISCOVERY_SETTING"}, + { + "@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "COMMON_MAC_ADDRESSES", + "DefaultValue": "", + "Value": "::", + "DataType": "java.lang.String", + "GroupName": "DISCOVERY_SETTING"}, + { + "@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "MIN_PROTOCOL_VERSION", + "DefaultValue": "V2", + "Value": "V2", + "DataType": "java.lang.String", + "GroupName": "CIFS_PROTOCOL_SETTINGS"}, + { + "@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "CONSOLE_CONNECTION_SETTING", + "DefaultValue": "last_known", + "Value": "last_known", + "DataType": "java.lang.String", + "GroupName": "CONSOLE_CONNECTION_SETTING"}, + { + "@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "SHARE_TYPE", + "DefaultValue": "CIFS", + "Value": "CIFS", + "DataType": "java.lang.String", + "GroupName": "BUILT_IN_APPLIANCE_SHARE_SETTINGS"}]}, }]) + def 
test_create_payload_success_case03(self, params, ome_connection_mock_for_application_console_preferences, + ome_response_mock, ome_default_args, mocker): + ome_response_mock.success = True + # ome_response_mock.json_data = {"value": [params["payload"]]} + f_module = self.get_module_mock(params=params['module_args']) + curr_payload = params["json_data"]["value"] + ret_payload, payload_dict = self.module.create_payload(f_module, curr_payload) + assert ret_payload == params["payload"] + + @pytest.mark.parametrize("params", [ + { + "payload": { + "ConsoleSetting": [ + { + "@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "DATA_PURGE_INTERVAL", + "DefaultValue": "365", + "Value": "361", + "DataType": "java.lang.Integer", + "GroupName": "" + }, + { + "@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "TRAP_FORWARDING_SETTING", + "DefaultValue": "AsIs", + "Value": "AsIs", + "DataType": "java.lang.String", + "GroupName": "" + }, + { + "@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "DEVICE_PREFERRED_NAME", + "DefaultValue": "SLOT_NAME", + "Value": "PREFER_DNS,PREFER_IDRAC_SYSTEM_HOSTNAME", + "DataType": "java.lang.String", + "GroupName": "DISCOVERY_SETTING" + } + ] + }, + "cifs_payload": { + "ConsoleSetting": [ + { + "@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "SHARE_TYPE", + "DefaultValue": "CIFS", + "Value": "CIFS", + "DataType": "java.lang.String", + "GroupName": "BUILT_IN_APPLIANCE_SHARE_SETTINGS" + } + ] + }, + "job_payload": {"Id": 0, + "JobName": "Global Health Task", + "JobDescription": "Global Health Task", + "Schedule": None, + "State": "Enabled", + "JobType": {"Id": 6, "Name": "Health_Task"}, + "Params": [{"Key": "metricType", "Value": "40, 50"}], + "Targets": [{"Id": 500, "Data": "", "TargetType": {"Id": 6000, "Name": "GROUP"}}]}, + "job_data": + { + "@odata.type": "#JobService.Job", + "@odata.id": "/api/JobService/Jobs(10093)", + "Id": 10093, + "JobName": "Global Health Task", + "JobDescription": "Global Health Task", + "NextRun": "2022-03-15 05:25:00.0", + "LastRun": "2022-03-15 05:24:00.043", + "StartTime": None, + "EndTime": None, + "Schedule": "0 0/1 * 1/1 * ? 
*", + "State": "Enabled", + "CreatedBy": "admin", + "UpdatedBy": None, + "Visible": None, + "Editable": None, + "Builtin": False, + "UserGenerated": True, + "Targets": [{"JobId": 10093, "Id": 500, "Data": "", "TargetType": {"Id": 6000, "Name": "GROUP"}}], + "Params": [{"JobId": 10093, "Key": "metricType", "Value": "40, 50"}], + "LastRunStatus": {"@odata.type": "#JobService.JobStatus", "Id": 2060, "Name": "Completed"}, + "JobType": {"@odata.type": "#JobService.JobType", "Id": 6, "Name": "Health_Task", + "Internal": False}, + "JobStatus": {"@odata.type": "#JobService.JobStatus", "Id": 2020, "Name": "Scheduled"}, + "ExecutionHistories@odata.navigationLink": "/api/JobService/Jobs(10093)/ExecutionHistories", + "LastExecutionDetail": {"@odata.id": "/api/JobService/Jobs(10093)/LastExecutionDetail"}}, + "payload_dict": {"DATA_PURGE_INTERVAL": {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "DATA_PURGE_INTERVAL", + "DefaultValue": "365", + "Value": "361", + "DataType": "java.lang.Integer", + "GroupName": "" + }, + "TRAP_FORWARDING_SETTING": {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "TRAP_FORWARDING_SETTING", + "DefaultValue": "AsIs", + "Value": "Normalized", + "DataType": "java.lang.String", + "GroupName": "" + }, + "MX7000_ONBOARDING_PREF": {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "MX7000_ONBOARDING_PREF", + "DefaultValue": "all", + "Value": "all", + "DataType": "java.lang.String", + "GroupName": "" + }, + "REPORTS_MAX_RESULTS_LIMIT": {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "REPORTS_MAX_RESULTS_LIMIT", + "DefaultValue": "0", + "Value": "123", + "DataType": "java.lang.Integer", + "GroupName": "" + }, + "EMAIL_SENDER": {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "EMAIL_SENDER", + "DefaultValue": "omcadmin@dell.com", + "Value": "admin@dell.com", + "DataType": "java.lang.String", + "GroupName": "" + }, + "DISCOVERY_APPROVAL_POLICY": { + "@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "DISCOVERY_APPROVAL_POLICY", + "DefaultValue": "Automatic", + "Value": "Automatic", + "DataType": "java.lang.String", + "GroupName": ""}, + "NODE_INITIATED_DISCOVERY_SET_TRAP_DESTINATION": { + "@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "NODE_INITIATED_DISCOVERY_SET_TRAP_DESTINATION", + "DefaultValue": "false", + "Value": "true", + "DataType": "java.lang.Boolean", + "GroupName": ""}, + "DEVICE_PREFERRED_NAME": { + "@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "DEVICE_PREFERRED_NAME", + "DefaultValue": "HOST_NAME", + "Value": "PREFER_DNS,PREFER_IDRAC_HOSTNAME", + "DataType": "java.lang.String", + "GroupName": "DISCOVERY_SETTING"}, + "INVALID_DEVICE_HOSTNAME": { + "@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "INVALID_DEVICE_HOSTNAME", + "DefaultValue": "", + "Value": "localhost", + "DataType": "java.lang.String", + "GroupName": "DISCOVERY_SETTING"}, + "COMMON_MAC_ADDRESSES": { + "@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "COMMON_MAC_ADDRESSES", + "DefaultValue": "", + "Value": "::", + "DataType": "java.lang.String", + "GroupName": "DISCOVERY_SETTING"}, + "MIN_PROTOCOL_VERSION": { + "@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "MIN_PROTOCOL_VERSION", + "DefaultValue": "V2", + "Value": "V2", + "DataType": "java.lang.String", + "GroupName": "CIFS_PROTOCOL_SETTINGS"}, + "CONSOLE_CONNECTION_SETTING": { + "@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "CONSOLE_CONNECTION_SETTING", + "DefaultValue": 
"last_known", + "Value": "last_known", + "DataType": "java.lang.String", + "GroupName": "CONSOLE_CONNECTION_SETTING"}, + "SHARE_TYPE": { + "@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "SHARE_TYPE", + "DefaultValue": "CIFS", + "Value": "CIFS", + "DataType": "java.lang.String", + "GroupName": "BUILT_IN_APPLIANCE_SHARE_SETTINGS"} + + }, + "schedule": None, + "module_args": { + "report_row_limit": 123, + } + } + ]) + def test_update_console_preferences(self, params, ome_connection_mock_for_application_console_preferences, + ome_response_mock, ome_default_args): + ome_response_mock.success = True + ome_default_args.update(params["module_args"]) + # ome_response_mock.json_data = {"value": [params["payload"]]} + f_module = self.get_module_mock(params=params['module_args']) + final_resp, cifs_resp, job_resp = self.module.update_console_preferences(f_module, ome_connection_mock_for_application_console_preferences, + params["payload"], params["cifs_payload"], + params["job_payload"], params["job_data"], + params["payload_dict"], params["schedule"]) + assert final_resp.status_code == 200 + + @pytest.mark.parametrize("params", [ + { + "payload": { + "ConsoleSetting": [ + { + "@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "DATA_PURGE_INTERVAL", + "DefaultValue": "365", + "Value": "361", + "DataType": "java.lang.Integer", + "GroupName": "" + }, + { + "@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "TRAP_FORWARDING_SETTING", + "DefaultValue": "AsIs", + "Value": "AsIs", + "DataType": "java.lang.String", + "GroupName": "" + }, + { + "@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "DEVICE_PREFERRED_NAME", + "DefaultValue": "SLOT_NAME", + "Value": "PREFER_DNS,PREFER_IDRAC_SYSTEM_HOSTNAME", + "DataType": "java.lang.String", + "GroupName": "DISCOVERY_SETTING" + } + ] + }, + "cifs_payload": { + "ConsoleSetting": [ + { + "@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "SHARE_TYPE", + "DefaultValue": "CIFS", + "Value": "CIFS", + "DataType": "java.lang.String", + "GroupName": "BUILT_IN_APPLIANCE_SHARE_SETTINGS" + } + ] + }, + "job_payload": {"Id": 0, + "JobName": "Global Health Task", + "JobDescription": "Global Health Task", + "Schedule": None, + "State": "Enabled", + "JobType": {"Id": 6, "Name": "Health_Task"}, + "Params": [{"Key": "metricType", "Value": "40, 50"}], + "Targets": [{"Id": 500, "Data": "", "TargetType": {"Id": 6000, "Name": "GROUP"}}]}, + "job_data": + { + "@odata.type": "#JobService.Job", + "@odata.id": "/api/JobService/Jobs(10093)", + "Id": 10093, + "JobName": "Global Health Task", + "JobDescription": "Global Health Task", + "NextRun": "2022-03-15 05:25:00.0", + "LastRun": "2022-03-15 05:24:00.043", + "StartTime": None, + "EndTime": None, + "Schedule": "0 0/1 * 1/1 * ? 
*", + "State": "Enabled", + "CreatedBy": "admin", + "UpdatedBy": None, + "Visible": None, + "Editable": None, + "Builtin": False, + "UserGenerated": True, + "Targets": [{"JobId": 10093, "Id": 500, "Data": "", "TargetType": {"Id": 6000, "Name": "GROUP"}}], + "Params": [{"JobId": 10093, "Key": "metricType", "Value": "40, 50"}], + "LastRunStatus": {"@odata.type": "#JobService.JobStatus", "Id": 2060, "Name": "Completed"}, + "JobType": {"@odata.type": "#JobService.JobType", "Id": 6, "Name": "Health_Task", + "Internal": False}, + "JobStatus": {"@odata.type": "#JobService.JobStatus", "Id": 2020, "Name": "Scheduled"}, + "ExecutionHistories@odata.navigationLink": "/api/JobService/Jobs(10093)/ExecutionHistories", + "LastExecutionDetail": {"@odata.id": "/api/JobService/Jobs(10093)/LastExecutionDetail"}}, + "payload_dict": {"DATA_PURGE_INTERVAL": {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "DATA_PURGE_INTERVAL", + "DefaultValue": "365", + "Value": "361", + "DataType": "java.lang.Integer", + "GroupName": ""}, + "TRAP_FORWARDING_SETTING": {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "TRAP_FORWARDING_SETTING", + "DefaultValue": "AsIs", + "Value": "Normalized", + "DataType": "java.lang.String", + "GroupName": ""}, + "MX7000_ONBOARDING_PREF": {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "MX7000_ONBOARDING_PREF", + "DefaultValue": "all", + "Value": "all", + "DataType": "java.lang.String", + "GroupName": ""}, + "REPORTS_MAX_RESULTS_LIMIT": {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "REPORTS_MAX_RESULTS_LIMIT", + "DefaultValue": "0", + "Value": "123", + "DataType": "java.lang.Integer", + "GroupName": ""}, + "EMAIL_SENDER": {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "EMAIL_SENDER", + "DefaultValue": "omcadmin@dell.com", + "Value": "admin@dell.com", + "DataType": "java.lang.String", + "GroupName": ""}, + "DISCOVERY_APPROVAL_POLICY": { + "@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "DISCOVERY_APPROVAL_POLICY", + "DefaultValue": "Automatic", + "Value": "Automatic", + "DataType": "java.lang.String", + "GroupName": ""}, + "NODE_INITIATED_DISCOVERY_SET_TRAP_DESTINATION": { + "@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "NODE_INITIATED_DISCOVERY_SET_TRAP_DESTINATION", + "DefaultValue": "false", + "Value": "true", + "DataType": "java.lang.Boolean", + "GroupName": ""}, + "DEVICE_PREFERRED_NAME": { + "@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "DEVICE_PREFERRED_NAME", + "DefaultValue": "HOST_NAME", + "Value": "PREFER_DNS,PREFER_IDRAC_HOSTNAME", + "DataType": "java.lang.String", + "GroupName": "DISCOVERY_SETTING"}, + "INVALID_DEVICE_HOSTNAME": { + "@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "INVALID_DEVICE_HOSTNAME", + "DefaultValue": "", + "Value": "localhost", + "DataType": "java.lang.String", + "GroupName": "DISCOVERY_SETTING"}, + "COMMON_MAC_ADDRESSES": { + "@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "COMMON_MAC_ADDRESSES", + "DefaultValue": "", + "Value": "::", + "DataType": "java.lang.String", + "GroupName": "DISCOVERY_SETTING"}, + "MIN_PROTOCOL_VERSION": { + "@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "MIN_PROTOCOL_VERSION", + "DefaultValue": "V2", + "Value": "V2", + "DataType": "java.lang.String", + "GroupName": "CIFS_PROTOCOL_SETTINGS"}, + "CONSOLE_CONNECTION_SETTING": { + "@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "CONSOLE_CONNECTION_SETTING", + "DefaultValue": "last_known", + 
"Value": "last_known", + "DataType": "java.lang.String", + "GroupName": "CONSOLE_CONNECTION_SETTING"}, + "SHARE_TYPE": { + "@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "SHARE_TYPE", + "DefaultValue": "CIFS", + "Value": "CIFS", + "DataType": "java.lang.String", + "GroupName": "BUILT_IN_APPLIANCE_SHARE_SETTINGS"} + + }, + "schedule": "0 0 0/5 1/1 * ? *", + "module_args": { + "builtin_appliance_share": {"share_options": "HTTPS", "cifs_options": "V2"} + } + } + ]) + def test_update_console_preferences_case02(self, params, ome_connection_mock_for_application_console_preferences, + ome_response_mock, ome_default_args): + ome_response_mock.success = True + ome_default_args.update(params["module_args"]) + # ome_response_mock.json_data = {"value": [params["payload"]]} + f_module = self.get_module_mock(params=params['module_args']) + final_resp, cifs_resp, job_resp = self.module.update_console_preferences(f_module, + ome_connection_mock_for_application_console_preferences, + params["payload"], + params["cifs_payload"], + params["job_payload"], + params["job_data"], + params["payload_dict"], + params["schedule"]) + assert cifs_resp.success is True + + @pytest.mark.parametrize("params", [{"payload": {"ConsoleSetting": [{"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "DATA_PURGE_INTERVAL", + "DefaultValue": "365", + "Value": "361", + "DataType": "java.lang.Integer", + "GroupName": ""}, + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "TRAP_FORWARDING_SETTING", + "DefaultValue": "AsIs", + "Value": "AsIs", + "DataType": "java.lang.String", + "GroupName": ""}, + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "DEVICE_PREFERRED_NAME", + "DefaultValue": "SLOT_NAME", + "Value": "PREFER_DNS,PREFER_IDRAC_SYSTEM_HOSTNAME", + "DataType": "java.lang.String", + "GroupName": "DISCOVERY_SETTING"}]}, + "cifs_payload": {"ConsoleSetting": [{"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "SHARE_TYPE", + "DefaultValue": "CIFS", + "Value": "CIFS", + "DataType": "java.lang.String", + "GroupName": "BUILT_IN_APPLIANCE_SHARE_SETTINGS"}]}, + "job_payload": {"Id": 0, + "JobName": "Global Health Task", + "JobDescription": "Global Health Task", + "Schedule": "0 0 0/5 1/1 * ? *", + "State": "Enabled", + "JobType": {"Id": 6, "Name": "Health_Task"}, + "Params": [{"Key": "metricType", "Value": "40, 50"}], + "Targets": [{"Id": 500, "Data": "", "TargetType": {"Id": 6000, "Name": "GROUP"}}]}, + "job_data": {"@odata.type": "#JobService.Job", + "@odata.id": "/api/JobService/Jobs(10093)", + "Id": 10093, + "JobName": "Global Health Task", + "JobDescription": "Global Health Task", + "NextRun": "2022-03-15 05:25:00.0", + "LastRun": "2022-03-15 05:24:00.043", + "StartTime": None, + "EndTime": None, + "Schedule": "0 0/1 * 1/1 * ? 
*", + "State": "Enabled", + "CreatedBy": "admin", + "UpdatedBy": None, + "Visible": None, + "Editable": None, + "Builtin": False, + "UserGenerated": True, + "Targets": [{"JobId": 10093, "Id": 500, "Data": "", "TargetType": {"Id": 6000, "Name": "GROUP"}}], + "Params": [{"JobId": 10093, "Key": "metricType", "Value": "40, 50"}], + "LastRunStatus": {"@odata.type": "#JobService.JobStatus", "Id": 2060, "Name": "Completed"}, + "JobType": {"@odata.type": "#JobService.JobType", "Id": 6, "Name": "Health_Task", "Internal": False}, + "JobStatus": {"@odata.type": "#JobService.JobStatus", "Id": 2020, "Name": "Scheduled"}, + "ExecutionHistories@odata.navigationLink": "/api/JobService/Jobs(10093)/ExecutionHistories", + "LastExecutionDetail": {"@odata.id": "/api/JobService/Jobs(10093)/LastExecutionDetail"}}, + "payload_dict": {"DATA_PURGE_INTERVAL": {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "DATA_PURGE_INTERVAL", + "DefaultValue": "365", + "Value": "361", + "DataType": "java.lang.Integer", + "GroupName": ""}, + "TRAP_FORWARDING_SETTING": {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "TRAP_FORWARDING_SETTING", + "DefaultValue": "AsIs", + "Value": "Normalized", + "DataType": "java.lang.String", + "GroupName": ""}, + "MX7000_ONBOARDING_PREF": {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "MX7000_ONBOARDING_PREF", + "DefaultValue": "all", + "Value": "all", + "DataType": "java.lang.String", + "GroupName": ""}, + "REPORTS_MAX_RESULTS_LIMIT": {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "REPORTS_MAX_RESULTS_LIMIT", + "DefaultValue": "0", + "Value": "123", + "DataType": "java.lang.Integer", + "GroupName": ""}, + "EMAIL_SENDER": {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "EMAIL_SENDER", + "DefaultValue": "omcadmin@dell.com", + "Value": "admin@dell.com", + "DataType": "java.lang.String", + "GroupName": ""}, + "DISCOVERY_APPROVAL_POLICY": {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "DISCOVERY_APPROVAL_POLICY", + "DefaultValue": "Automatic", + "Value": "Automatic", + "DataType": "java.lang.String", + "GroupName": ""}, + "NODE_INITIATED_DISCOVERY_SET_TRAP_DESTINATION": {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "NODE_INITIATED_DISCOVERY_SET_TRAP_" + "DESTINATION", + "DefaultValue": "false", + "Value": "true", + "DataType": "java.lang.Boolean", + "GroupName": ""}, + "DEVICE_PREFERRED_NAME": {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "DEVICE_PREFERRED_NAME", + "DefaultValue": "HOST_NAME", + "Value": "PREFER_DNS,PREFER_IDRAC_HOSTNAME", + "DataType": "java.lang.String", + "GroupName": "DISCOVERY_SETTING"}, + "INVALID_DEVICE_HOSTNAME": {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "INVALID_DEVICE_HOSTNAME", + "DefaultValue": "", + "Value": "localhost", + "DataType": "java.lang.String", + "GroupName": "DISCOVERY_SETTING"}, + "COMMON_MAC_ADDRESSES": {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "COMMON_MAC_ADDRESSES", + "DefaultValue": "", + "Value": "::", + "DataType": "java.lang.String", + "GroupName": "DISCOVERY_SETTING"}, + "MIN_PROTOCOL_VERSION": {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "MIN_PROTOCOL_VERSION", + "DefaultValue": "V2", + "Value": "V2", + "DataType": "java.lang.String", + "GroupName": "CIFS_PROTOCOL_SETTINGS"}, + "CONSOLE_CONNECTION_SETTING": {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "CONSOLE_CONNECTION_SETTING", + "DefaultValue": "last_known", + "Value": "last_known", + 
"DataType": "java.lang.String", + "GroupName": "CONSOLE_CONNECTION_SETTING"}, + "SHARE_TYPE": {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "SHARE_TYPE", + "DefaultValue": "CIFS", + "Value": "CIFS", + "DataType": "java.lang.String", + "GroupName": "BUILT_IN_APPLIANCE_SHARE_SETTINGS"}}, + "schedule": "0 0 0/5 1/1 * ? *", + "module_args": {"device_health": {"health_check_interval": 50, + "health_check_interval_unit": "Minutes"}}}]) + def test_update_console_preferences_case03(self, params, ome_connection_mock_for_application_console_preferences, + ome_response_mock, ome_default_args): + ome_response_mock.success = True + ome_default_args.update(params["module_args"]) + # ome_response_mock.json_data = {"value": [params["payload"]]} + f_module = self.get_module_mock(params=params['module_args']) + final_resp, cifs_resp, job_resp = self.module.update_console_preferences(f_module, + ome_connection_mock_for_application_console_preferences, + params["payload"], + params["cifs_payload"], + params["job_payload"], + params["job_data"], + params["payload_dict"], + params["schedule"]) + assert job_resp.success is True + + @pytest.mark.parametrize("params", [{"module_args": {"report_row_limit": 123}, + "payload": {"ConsoleSetting": [{"Name": "DATA_PURGE_INTERVAL", + "DefaultValue": "365", + "Value": "361", + "DataType": "java.lang.Integer", + "GroupName": ""}, + {"Name": "TRAP_FORWARDING_SETTING", + "DefaultValue": "AsIs", + "Value": "AsIs", + "DataType": "java.lang.String", + "GroupName": ""}, + {"Name": "DEVICE_PREFERRED_NAME", + "DefaultValue": "SLOT_NAME", + "Value": "PREFER_DNS,PREFER_IDRAC_SYSTEM_HOSTNAME", + "DataType": "java.lang.String", + "GroupName": "DISCOVERY_SETTING"}]}, + "curr_payload": {"DATA_PURGE_INTERVAL": {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "DATA_PURGE_INTERVAL", + "DefaultValue": "365", + "Value": "361", + "DataType": "java.lang.Integer", + "GroupName": ""}, + "TRAP_FORWARDING_SETTING": {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "TRAP_FORWARDING_SETTING", + "DefaultValue": "AsIs", + "Value": "Normalized", + "DataType": "java.lang.String", + "GroupName": ""}, + "MX7000_ONBOARDING_PREF": {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "MX7000_ONBOARDING_PREF", + "DefaultValue": "all", + "Value": "all", + "DataType": "java.lang.String", + "GroupName": ""}, + "REPORTS_MAX_RESULTS_LIMIT": {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "REPORTS_MAX_RESULTS_LIMIT", + "DefaultValue": "0", + "Value": "123", + "DataType": "java.lang.Integer", + "GroupName": ""}, + "EMAIL_SENDER": {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "EMAIL_SENDER", + "DefaultValue": "omcadmin@dell.com", + "Value": "admin@dell.com", + "DataType": "java.lang.String", + "GroupName": ""}, + "DISCOVERY_APPROVAL_POLICY": {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "DISCOVERY_APPROVAL_POLICY", + "DefaultValue": "Automatic", + "Value": "Automatic", + "DataType": "java.lang.String", + "GroupName": ""}, + "NODE_INITIATED_DISCOVERY_SET_TRAP_DESTINATION": {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "NODE_INITIATED_DISCOVERY_SET_TRAP_" + "DESTINATION", + "DefaultValue": "false", + "Value": "true", + "DataType": "java.lang.Boolean", + "GroupName": ""}, + "DEVICE_PREFERRED_NAME": {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "DEVICE_PREFERRED_NAME", + "DefaultValue": "HOST_NAME", + "Value": "PREFER_DNS,PREFER_IDRAC_HOSTNAME", + "DataType": 
"java.lang.String", + "GroupName": "DISCOVERY_SETTING"}, + "INVALID_DEVICE_HOSTNAME": {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "INVALID_DEVICE_HOSTNAME", + "DefaultValue": "", + "Value": "localhost", + "DataType": "java.lang.String", + "GroupName": "DISCOVERY_SETTING"}, + "COMMON_MAC_ADDRESSES": {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "COMMON_MAC_ADDRESSES", + "DefaultValue": "", + "Value": "::", + "DataType": "java.lang.String", + "GroupName": "DISCOVERY_SETTING"}, + "MIN_PROTOCOL_VERSION": {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "MIN_PROTOCOL_VERSION", + "DefaultValue": "V2", + "Value": "V2", + "DataType": "java.lang.String", + "GroupName": "CIFS_PROTOCOL_SETTINGS"}, + "CONSOLE_CONNECTION_SETTING": {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "CONSOLE_CONNECTION_SETTING", + "DefaultValue": "last_known", + "Value": "last_known", + "DataType": "java.lang.String", + "GroupName": "CONSOLE_CONNECTION_SETTING"}, + "SHARE_TYPE": {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "SHARE_TYPE", + "DefaultValue": "CIFS", + "Value": "CIFS", + "DataType": "java.lang.String", + "GroupName": "BUILT_IN_APPLIANCE_SHARE_SETTINGS"}}, + "json_data": {"value": [{"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "DATA_PURGE_INTERVAL", + "DefaultValue": "365", + "Value": "361", + "DataType": "java.lang.Integer", + "GroupName": ""}, + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "TRAP_FORWARDING_SETTING", + "DefaultValue": "AsIs", + "Value": "Normalized", + "DataType": "java.lang.String", + "GroupName": ""}, + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "MX7000_ONBOARDING_PREF", + "DefaultValue": "all", + "Value": "all", + "DataType": "java.lang.String", + "GroupName": ""}, + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "REPORTS_MAX_RESULTS_LIMIT", + "DefaultValue": "0", + "Value": "123", + "DataType": "java.lang.Integer", + "GroupName": ""}, + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "EMAIL_SENDER", + "DefaultValue": "omcadmin@dell.com", + "Value": "admin@dell.com", + "DataType": "java.lang.String", + "GroupName": ""}, + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "DISCOVERY_APPROVAL_POLICY", + "DefaultValue": "Automatic", + "Value": "Automatic", + "DataType": "java.lang.String", + "GroupName": ""}, + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "NODE_INITIATED_DISCOVERY_SET_TRAP_DESTINATION", + "DefaultValue": "false", + "Value": "true", + "DataType": "java.lang.Boolean", + "GroupName": ""}, + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "DEVICE_PREFERRED_NAME", + "DefaultValue": "HOST_NAME", + "Value": "PREFER_DNS,PREFER_IDRAC_HOSTNAME", + "DataType": "java.lang.String", + "GroupName": "DISCOVERY_SETTING"}, + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "INVALID_DEVICE_HOSTNAME", + "DefaultValue": "", + "Value": "localhost", + "DataType": "java.lang.String", + "GroupName": "DISCOVERY_SETTING"}, + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "COMMON_MAC_ADDRESSES", + "DefaultValue": "", + "Value": "::", + "DataType": "java.lang.String", + "GroupName": "DISCOVERY_SETTING"}, + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "MIN_PROTOCOL_VERSION", + "DefaultValue": "V2", + "Value": "V2", + "DataType": "java.lang.String", + "GroupName": "CIFS_PROTOCOL_SETTINGS"}, + {"@odata.type": 
"#ApplicationService.ConsoleSetting", + "Name": "CONSOLE_CONNECTION_SETTING", + "DefaultValue": "last_known", + "Value": "last_known", + "DataType": "java.lang.String", + "GroupName": "CONSOLE_CONNECTION_SETTING"}, + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "SHARE_TYPE", + "DefaultValue": "CIFS", + "Value": "CIFS", + "DataType": "java.lang.String", + "GroupName": "BUILT_IN_APPLIANCE_SHARE_SETTINGS"}]}, }]) + def test_create_payload_dict(self, params, ome_connection_mock_for_application_console_preferences, + ome_response_mock, ome_default_args, mocker): + ome_response_mock.success = True + ome_response_mock.json_data = {"value": [params["payload"]]} + f_module = self.get_module_mock(params=params['module_args']) + curr_payload = params["json_data"]["value"] + ret_payload = self.module.create_payload_dict(curr_payload) + assert ret_payload == params["curr_payload"] + + @pytest.mark.parametrize("params", [{"module_args": {"builtin_appliance_share": {"share_options": "CIFS", + "cifs_options": "V2"}}, + "payload": {"ConsoleSetting": [{"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "SHARE_TYPE", + "DefaultValue": "CIFS", + "Value": "CIFS", + "DataType": "java.lang.String", + "GroupName": "BUILT_IN_APPLIANCE_SHARE_SETTINGS"}]}, + "curr_payload": {"DATA_PURGE_INTERVAL": {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "DATA_PURGE_INTERVAL", + "DefaultValue": "365", + "Value": "361", + "DataType": "java.lang.Integer", + "GroupName": ""}, + "TRAP_FORWARDING_SETTING": {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "TRAP_FORWARDING_SETTING", + "DefaultValue": "AsIs", + "Value": "Normalized", + "DataType": "java.lang.String", + "GroupName": ""}, + "MX7000_ONBOARDING_PREF": {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "MX7000_ONBOARDING_PREF", + "DefaultValue": "all", + "Value": "all", + "DataType": "java.lang.String", + "GroupName": ""}, + "REPORTS_MAX_RESULTS_LIMIT": {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "REPORTS_MAX_RESULTS_LIMIT", + "DefaultValue": "0", + "Value": "123", + "DataType": "java.lang.Integer", + "GroupName": ""}, + "EMAIL_SENDER": {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "EMAIL_SENDER", + "DefaultValue": "omcadmin@dell.com", + "Value": "admin@dell.com", + "DataType": "java.lang.String", + "GroupName": ""}, + "DISCOVERY_APPROVAL_POLICY": {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "DISCOVERY_APPROVAL_POLICY", + "DefaultValue": "Automatic", + "Value": "Automatic", + "DataType": "java.lang.String", + "GroupName": ""}, + "NODE_INITIATED_DISCOVERY_SET_TRAP_DESTINATION": {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "NODE_INITIATED_DISCOVERY_SET_TRAP_" + "DESTINATION", + "DefaultValue": "false", + "Value": "true", + "DataType": "java.lang.Boolean", + "GroupName": ""}, + "DEVICE_PREFERRED_NAME": {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "DEVICE_PREFERRED_NAME", + "DefaultValue": "HOST_NAME", + "Value": "PREFER_DNS,PREFER_IDRAC_HOSTNAME", + "DataType": "java.lang.String", + "GroupName": "DISCOVERY_SETTING"}, + "INVALID_DEVICE_HOSTNAME": {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "INVALID_DEVICE_HOSTNAME", + "DefaultValue": "", + "Value": "localhost", + "DataType": "java.lang.String", + "GroupName": "DISCOVERY_SETTING"}, + "COMMON_MAC_ADDRESSES": {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "COMMON_MAC_ADDRESSES", + "DefaultValue": "", + "Value": "::", + 
"DataType": "java.lang.String", + "GroupName": "DISCOVERY_SETTING"}, + "MIN_PROTOCOL_VERSION": {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "MIN_PROTOCOL_VERSION", + "DefaultValue": "V2", + "Value": "V2", + "DataType": "java.lang.String", + "GroupName": "CIFS_PROTOCOL_SETTINGS"}, + "CONSOLE_CONNECTION_SETTING": {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "CONSOLE_CONNECTION_SETTING", + "DefaultValue": "last_known", + "Value": "last_known", + "DataType": "java.lang.String", + "GroupName": "CONSOLE_CONNECTION_SETTING"}, + "SHARE_TYPE": {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "SHARE_TYPE", + "DefaultValue": "CIFS", + "Value": "CIFS", + "DataType": "java.lang.String", + "GroupName": "BUILT_IN_APPLIANCE_SHARE_SETTINGS"}}, + "json_data": {"value": [{"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "DATA_PURGE_INTERVAL", + "DefaultValue": "365", + "Value": "361", + "DataType": "java.lang.Integer", + "GroupName": ""}, + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "TRAP_FORWARDING_SETTING", + "DefaultValue": "AsIs", + "Value": "Normalized", + "DataType": "java.lang.String", + "GroupName": ""}, + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "MX7000_ONBOARDING_PREF", + "DefaultValue": "all", + "Value": "all", + "DataType": "java.lang.String", + "GroupName": ""}, + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "REPORTS_MAX_RESULTS_LIMIT", + "DefaultValue": "0", + "Value": "123", + "DataType": "java.lang.Integer", + "GroupName": ""}, + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "EMAIL_SENDER", + "DefaultValue": "omcadmin@dell.com", + "Value": "admin@dell.com", + "DataType": "java.lang.String", + "GroupName": ""}, + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "DISCOVERY_APPROVAL_POLICY", + "DefaultValue": "Automatic", + "Value": "Automatic", + "DataType": "java.lang.String", + "GroupName": ""}, + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "NODE_INITIATED_DISCOVERY_SET_TRAP_DESTINATION", + "DefaultValue": "false", + "Value": "true", + "DataType": "java.lang.Boolean", + "GroupName": ""}, + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "DEVICE_PREFERRED_NAME", + "DefaultValue": "HOST_NAME", + "Value": "PREFER_DNS,PREFER_IDRAC_HOSTNAME", + "DataType": "java.lang.String", + "GroupName": "DISCOVERY_SETTING"}, + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "INVALID_DEVICE_HOSTNAME", + "DefaultValue": "", + "Value": "localhost", + "DataType": "java.lang.String", + "GroupName": "DISCOVERY_SETTING"}, + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "COMMON_MAC_ADDRESSES", + "DefaultValue": "", + "Value": "::", + "DataType": "java.lang.String", + "GroupName": "DISCOVERY_SETTING"}, + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "MIN_PROTOCOL_VERSION", + "DefaultValue": "V2", + "Value": "V2", + "DataType": "java.lang.String", + "GroupName": "CIFS_PROTOCOL_SETTINGS"}, + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "CONSOLE_CONNECTION_SETTING", + "DefaultValue": "last_known", + "Value": "last_known", + "DataType": "java.lang.String", + "GroupName": "CONSOLE_CONNECTION_SETTING"}, + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "SHARE_TYPE", + "DefaultValue": "CIFS", + "Value": "CIFS", + "DataType": "java.lang.String", + "GroupName": "BUILT_IN_APPLIANCE_SHARE_SETTINGS"}]}, }]) + def test_create_cifs_payload(self, 
params, ome_connection_mock_for_application_console_preferences, + ome_response_mock, ome_default_args, mocker): + ome_response_mock.success = True + ome_response_mock.json_data = {"value": [params["payload"]]} + f_module = self.get_module_mock(params=params['module_args']) + curr_payload = params["json_data"]["value"] + ret_payload = self.module.create_cifs_payload(ome_connection_mock_for_application_console_preferences, + curr_payload) + assert ret_payload.get("ConsoleSetting")[0]["Name"] == params["payload"]["ConsoleSetting"][0]["Name"] + + @pytest.mark.parametrize("params", [{"module_args": {"device_health": {"health_check_interval": 50, + "health_check_interval_unit": "Minutes"}}, + "job_payload": {"Id": 0, + "JobName": "Global Health Task", + "JobDescription": "Global Health Task", + "Schedule": None, + "State": "Enabled", + "JobType": {"Id": 6, "Name": "Health_Task"}, + "Params": [{"Key": "metricType", "Value": "40, 50"}], + "Targets": [{"Id": 500, "Data": "", "TargetType": {"Id": 6000, "Name": "GROUP"}}]}}]) + def test_create_job(self, params, ome_connection_mock_for_application_console_preferences, + ome_response_mock, ome_default_args, mocker): + ome_response_mock.success = True + ome_response_mock.json_data = params["job_payload"] + ome_default_args.update(params['module_args']) + job_payload, schedule = self.module.create_job(ome_connection_mock_for_application_console_preferences) + assert job_payload == params["job_payload"] + + @pytest.mark.parametrize("params", [{"module_args": {"device_health": {"health_check_interval": 5, + "health_check_interval_unit": "Hourly"}}, + "job_payload": {"Id": 0, + "JobName": "Global Health Task", + "JobDescription": "Global Health Task", + "Schedule": "0 0 0/5 1/1 * ? *", + "State": "Enabled", + "JobType": {"Id": 6, "Name": "Health_Task"}, + "Params": [{"Key": "metricType", "Value": "40, 50"}], + "Targets": [{"Id": 500, "Data": "", "TargetType": {"Id": 6000, "Name": "GROUP"}}]}, + "schedule": "0 0 0/5 1/1 * ? *"}]) + def test_create_job_case02(self, params, ome_connection_mock_for_application_console_preferences, + ome_response_mock, ome_default_args, mocker): + ome_response_mock.success = True + f_module = self.get_module_mock(params=params['module_args']) + ome_response_mock.json_data = params["job_payload"] + ome_default_args.update(params['module_args']) + job_payload, schedule = self.module.create_job(f_module) + assert schedule == params["schedule"] + + @pytest.mark.parametrize("params", [{"module_args": {"device_health": {"health_check_interval": 5, + "health_check_interval_unit": "Minutes"}}, + "job_payload": {"Id": 0, + "JobName": "Global Health Task", + "JobDescription": "Global Health Task", + "Schedule": "0 0/5 * 1/1 * ? *", + "State": "Enabled", + "JobType": {"Id": 6, "Name": "Health_Task"}, + "Params": [{"Key": "metricType", "Value": "40, 50"}], + "Targets": [{"Id": 500, "Data": "", "TargetType": {"Id": 6000, "Name": "GROUP"}}]}, + "schedule": "0 0/5 * 1/1 * ? 
*"}]) + def test_create_job_case03(self, params, ome_connection_mock_for_application_console_preferences, + ome_response_mock, ome_default_args, mocker): + ome_response_mock.success = True + f_module = self.get_module_mock(params=params['module_args']) + ome_response_mock.json_data = params["job_payload"] + ome_default_args.update(params['module_args']) + job_payload, schedule = self.module.create_job(f_module) + assert schedule == params["schedule"] + + @pytest.mark.parametrize("params", [ + { + "module_args": {"metrics_collection_settings": 361}, + "cifs_payload": {"ConsoleSetting": [{"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "SHARE_TYPE", + "DefaultValue": "CIFS", + "Value": "CIFS", + "DataType": "java.lang.String", + "GroupName": "BUILT_IN_APPLIANCE_SHARE_SETTINGS"}]}, + "cp_data": {"value": [{"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "DATA_PURGE_INTERVAL", + "DefaultValue": "365", + "Value": "361", + "DataType": "java.lang.Integer", + "GroupName": ""}, + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "TRAP_FORWARDING_SETTING", + "DefaultValue": "AsIs", + "Value": "Normalized", + "DataType": "java.lang.String", + "GroupName": ""}, + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "MX7000_ONBOARDING_PREF", + "DefaultValue": "all", + "Value": "all", + "DataType": "java.lang.String", + "GroupName": ""}, + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "REPORTS_MAX_RESULTS_LIMIT", + "DefaultValue": "0", + "Value": "123", + "DataType": "java.lang.Integer", + "GroupName": ""}, + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "EMAIL_SENDER", + "DefaultValue": "omcadmin@dell.com", + "Value": "admin@dell.com", + "DataType": "java.lang.String", + "GroupName": ""}, ]}, + "payload": {"ConsoleSetting": [{"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "DATA_PURGE_INTERVAL", + "DefaultValue": "365", + "Value": "361", + "DataType": "java.lang.Integer", + "GroupName": ""}]}, }]) + def test_module_idempotent(self, mocker, params, ome_connection_mock_for_application_console_preferences, + ome_response_mock, ome_default_args): + curr_resp = params["cp_data"]["value"] + payload = params["payload"] + cifs_payload = params["cifs_payload"] + schedule = None + job = None + diff = self.module._diff_payload(curr_resp, payload, cifs_payload, schedule, job) + assert diff == 0 + + @pytest.mark.parametrize("params", [ + { + "module_args": {"metrics_collection_settings": 361}, + "cifs_payload": {"ConsoleSetting": [{"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "SHARE_TYPE", + "DefaultValue": "CIFS", + "Value": "CIFS", + "DataType": "java.lang.String", + "GroupName": "BUILT_IN_APPLIANCE_SHARE_SETTINGS"}]}, + "cp_data": {"value": [{"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "DATA_PURGE_INTERVAL", + "DefaultValue": "365", + "Value": "361", + "DataType": "java.lang.Integer", + "GroupName": ""}, + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "TRAP_FORWARDING_SETTING", + "DefaultValue": "AsIs", + "Value": "Normalized", + "DataType": "java.lang.String", + "GroupName": ""}, + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "MX7000_ONBOARDING_PREF", + "DefaultValue": "all", + "Value": "all", + "DataType": "java.lang.String", + "GroupName": ""}, + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "REPORTS_MAX_RESULTS_LIMIT", + "DefaultValue": "0", + "Value": "123", + "DataType": "java.lang.Integer", + "GroupName": ""}, 
+ {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "EMAIL_SENDER", + "DefaultValue": "omcadmin@dell.com", + "Value": "admin@dell.com", + "DataType": "java.lang.String", + "GroupName": ""}, ]}, + "payload": {"ConsoleSetting": [{"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "DATA_PURGE_INTERVAL", + "DefaultValue": "365", + "Value": "365", + "DataType": "java.lang.Integer", + "GroupName": ""}]}, }]) + def test_module_idempotent_case02(self, mocker, params, ome_connection_mock_for_application_console_preferences, + ome_response_mock, ome_default_args): + curr_resp = params["cp_data"]["value"] + payload = params["payload"] + cifs_payload = params["cifs_payload"] + schedule = None + job = None + diff = self.module._diff_payload(curr_resp, payload, cifs_payload, schedule, job) + assert diff == 1 + + @pytest.mark.parametrize("params", [ + { + "module_args": {"device_health": {"health_check_interval": 5, + "health_check_interval_unit": "Hourly"}}, + "json_data": {"@odata.type": "#JobService.Job", + "@odata.id": "/api/JobService/Jobs(10093)", + "Id": 10093, + "JobName": "Global Health Task", + "JobDescription": "Global Health Task", + "NextRun": "2022-03-15 05:25:00.0", + "LastRun": "2022-03-15 05:24:00.043", + "StartTime": None, + "EndTime": None, + "Schedule": "0 0 0/5 1/1 * ? *", + "State": "Enabled", + "CreatedBy": "admin", + "UpdatedBy": None, + "Visible": None, + "Editable": None, + "Builtin": False, + "UserGenerated": True, + "Targets": [{"JobId": 10093, "Id": 500, "Data": "", "TargetType": {"Id": 6000, "Name": "GROUP"}}], + "Params": [{"JobId": 10093, "Key": "metricType", "Value": "40, 50"}], + "LastRunStatus": {"@odata.type": "#JobService.JobStatus", "Id": 2060, "Name": "Completed"}, + "JobType": {"@odata.type": "#JobService.JobType", "Id": 6, "Name": "Health_Task", "Internal": False}, + "JobStatus": {"@odata.type": "#JobService.JobStatus", "Id": 2020, "Name": "Scheduled"}, + "ExecutionHistories@odata.navigationLink": "/api/JobService/Jobs(10093)/ExecutionHistories", + "LastExecutionDetail": {"@odata.id": "/api/JobService/Jobs(10093)/LastExecutionDetail"}}, + "cp_data": + {"value": [{"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "DATA_PURGE_INTERVAL", + "DefaultValue": "365", + "Value": "361", + "DataType": "java.lang.Integer", + "GroupName": ""}, + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "TRAP_FORWARDING_SETTING", + "DefaultValue": "AsIs", + "Value": "Normalized", + "DataType": "java.lang.String", + "GroupName": ""}, + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "MX7000_ONBOARDING_PREF", + "DefaultValue": "all", + "Value": "all", + "DataType": "java.lang.String", + "GroupName": ""}, + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "REPORTS_MAX_RESULTS_LIMIT", + "DefaultValue": "0", + "Value": "123", + "DataType": "java.lang.Integer", + "GroupName": ""}, + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "EMAIL_SENDER", + "DefaultValue": "omcadmin@dell.com", + "Value": "admin@dell.com", + "DataType": "java.lang.String", + "GroupName": ""}, ]}, + "schedule": "0 0 0/5 1/1 * ? 
*", + "payload": {"ConsoleSetting": [{"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "DATA_PURGE_INTERVAL", + "DefaultValue": "365", + "Value": "365", + "DataType": "java.lang.Integer", + "GroupName": ""}]}, + "cifs_payload": {"ConsoleSetting": [{"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "SHARE_TYPE", + "DefaultValue": "CIFS", + "Value": "CIFS", + "DataType": "java.lang.String", + "GroupName": "BUILT_IN_APPLIANCE_SHARE_SETTINGS"}]}}]) + def test_module_idempotent_case03(self, mocker, params, ome_connection_mock_for_application_console_preferences, + ome_response_mock, ome_default_args): + curr_resp = params["cp_data"]["value"] + payload = params["payload"] + cifs_payload = params["cifs_payload"] + schedule = params["schedule"] + job = params["json_data"] + diff = self.module._diff_payload(curr_resp, payload, cifs_payload, schedule, job) + assert diff == 1 + + @pytest.mark.parametrize("params", [ + { + "module_args": {"device_health": {"health_check_interval": 100, + "health_check_interval_unit": "Minutes"} + }}]) + def test__validate_params_fail_case01(self, params, ome_connection_mock_for_application_console_preferences): + health = params['module_args'].get("device_health").get("health_check_interval_unit") + f_module = self.get_module_mock(params=params['module_args']) + with pytest.raises(Exception) as exc: + self.module._validate_params(f_module) + assert exc.value.args[0] == HEALTH_CHECK_INTERVAL_INVALID.format(health) + + @pytest.mark.parametrize("params", [ + { + "module_args": {"device_health": {"health_check_interval_unit": "Minutes"} + }}]) + def test__validate_params_fail_case02(self, params, ome_connection_mock_for_application_console_preferences): + f_module = self.get_module_mock(params=params['module_args']) + with pytest.raises(Exception) as exc: + self.module._validate_params(f_module) + assert exc.value.args[0] == HEALTH_CHECK_INTERVAL_REQUIRED + + @pytest.mark.parametrize("params", [ + { + "module_args": {"device_health": {"health_check_interval": 50} + }}]) + def test__validate_params_fail_case03(self, params, ome_connection_mock_for_application_console_preferences): + f_module = self.get_module_mock(params=params['module_args']) + with pytest.raises(Exception) as exc: + self.module._validate_params(f_module) + assert exc.value.args[0] == HEALTH_CHECK_UNIT_REQUIRED + + @pytest.mark.parametrize("params", [ + { + "module_args": {"device_health": {"health_check_interval": 100, + "health_check_interval_unit": "Hourly"} + }}]) + def test__validate_params_fail_case04(self, params, ome_connection_mock_for_application_console_preferences): + health = params['module_args'].get("device_health").get("health_check_interval_unit") + f_module = self.get_module_mock(params=params['module_args']) + with pytest.raises(Exception) as exc: + self.module._validate_params(f_module) + assert exc.value.args[0] == HEALTH_CHECK_INTERVAL_INVALID.format(health) + + @pytest.mark.parametrize("params", [ + { + "module_args": {"report_row_limit": 123, + "mx7000_onboarding_preferences": "all", + "email_sender_settings": "admin@dell.com", + "trap_forwarding_format": "Normalized", + "metrics_collection_settings": 361 + }, + "json_data": {"value": [{"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "DATA_PURGE_INTERVAL", + "DefaultValue": "365", + "Value": "361", + "DataType": "java.lang.Integer", + "GroupName": ""}, + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "TRAP_FORWARDING_SETTING", + "DefaultValue": "AsIs", + "Value": 
"Normalized", + "DataType": "java.lang.String", + "GroupName": ""}, + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "MX7000_ONBOARDING_PREF", + "DefaultValue": "all", + "Value": "all", + "DataType": "java.lang.String", + "GroupName": ""}, + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "REPORTS_MAX_RESULTS_LIMIT", + "DefaultValue": "0", + "Value": "123", + "DataType": "java.lang.Integer", + "GroupName": ""}, + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "EMAIL_SENDER", + "DefaultValue": "omcadmin@dell.com", + "Value": "admin@dell.com", + "DataType": "java.lang.String", + "GroupName": ""}, ]}, }]) + def test_module_check_mode(self, mocker, params, ome_connection_mock_for_application_console_preferences, + ome_response_mock, ome_default_args): + ome_response_mock.success = True + ome_response_mock.status_code = 201 + ome_response_mock.json_data = {"value": [params["json_data"]]} + ome_default_args.update(params['module_args']) + f_module = self.get_module_mock(params=ome_default_args) + get_json_data = params["json_data"] + update_json_data = params["json_data"] + + f_module.check_mode = True + + with pytest.raises(Exception) as err: + self.module.process_check_mode(f_module, 0) + assert err.value.args[0] == NO_CHANGES + + with pytest.raises(Exception) as err: + self.module.process_check_mode(f_module, 1) + assert err.value.args[0] == CHANGES_FOUND + + f_module.check_mode = False + + with pytest.raises(Exception) as err: + self.module.process_check_mode(f_module, 0) + assert err.value.args[0] == NO_CHANGES + + @pytest.mark.parametrize("params", [ + { + "job_details": { + "@odata.type": "#JobService.Job", + "@odata.id": "/api/JobService/Jobs(10093)", + "Id": 10093, + "JobName": "Global Health Task", + "JobDescription": "Global Health Task", + "NextRun": "2022-03-15 05:25:00.0", + "LastRun": "2022-03-15 05:24:00.043", + "StartTime": None, + "EndTime": None, + "Schedule": "0 0/1 * 1/1 * ? 
*", + "State": "Enabled", + "CreatedBy": "admin", + "UpdatedBy": None, + "Visible": None, + "Editable": None, + "Builtin": False, + "UserGenerated": True, + "Targets": [{"JobId": 10093, "Id": 500, "Data": "", "TargetType": {"Id": 6000, "Name": "GROUP"}}], + "Params": [{"JobId": 10093, "Key": "metricType", "Value": "40, 50"}], + "LastRunStatus": {"@odata.type": "#JobService.JobStatus", "Id": 2060, "Name": "Completed"}, + "JobType": {"@odata.type": "#JobService.JobType", "Id": 6, "Name": "Health_Task", "Internal": False}, + "JobStatus": {"@odata.type": "#JobService.JobStatus", "Id": 2020, "Name": "Scheduled"}, + "ExecutionHistories@odata.navigationLink": "/api/JobService/Jobs(10093)/ExecutionHistories", + "LastExecutionDetail": {"@odata.id": "/api/JobService/Jobs(10093)/LastExecutionDetail"} + }, + "job_payload": {"Id": 0, + "JobName": "Global Health Task", + "JobDescription": "Global Health Task", + "Schedule": None, + "State": "Enabled", + "JobType": {"Id": 6, "Name": "Health_Task"}, + "Params": [{"Key": "metricType", "Value": "40, 50"}], + "Targets": [{"Id": 500, "Data": "", "TargetType": {"Id": 6000, "Name": "GROUP"}}]}, + "cp_data": {"value": [{"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "DATA_PURGE_INTERVAL", + "DefaultValue": "365", + "Value": "361", + "DataType": "java.lang.Integer", + "GroupName": ""}, + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "TRAP_FORWARDING_SETTING", + "DefaultValue": "AsIs", + "Value": "Normalized", + "DataType": "java.lang.String", + "GroupName": ""}, + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "MX7000_ONBOARDING_PREF", + "DefaultValue": "all", + "Value": "all", + "DataType": "java.lang.String", + "GroupName": ""}, + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "REPORTS_MAX_RESULTS_LIMIT", + "DefaultValue": "0", + "Value": "123", + "DataType": "java.lang.Integer", + "GroupName": ""}, + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "EMAIL_SENDER", + "DefaultValue": "omcadmin@dell.com", + "Value": "admin@dell.com", + "DataType": "java.lang.String", + "GroupName": ""}, ]}, + "payload_dict": + {"DATA_PURGE_INTERVAL": {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "DATA_PURGE_INTERVAL", + "DefaultValue": "365", + "Value": "361", + "DataType": "java.lang.Integer", + "GroupName": ""}, + "TRAP_FORWARDING_SETTING": + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "TRAP_FORWARDING_SETTING", + "DefaultValue": "AsIs", + "Value": "Normalized", + "DataType": "java.lang.String", + "GroupName": ""}, + "MX7000_ONBOARDING_PREF": + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "MX7000_ONBOARDING_PREF", + "DefaultValue": "all", + "Value": "all", + "DataType": "java.lang.String", + "GroupName": ""}, + "REPORTS_MAX_RESULTS_LIMIT": + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "REPORTS_MAX_RESULTS_LIMIT", + "DefaultValue": "0", + "Value": "123", + "DataType": "java.lang.Integer", + "GroupName": ""}, + "EMAIL_SENDER": + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "EMAIL_SENDER", + "DefaultValue": "omcadmin@dell.com", + "Value": "admin@dell.com", + "DataType": "java.lang.String", + "GroupName": ""}, + "DISCOVERY_APPROVAL_POLICY": + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "DISCOVERY_APPROVAL_POLICY", + "DefaultValue": "Automatic", + "Value": "Automatic", + "DataType": "java.lang.String", + "GroupName": ""}, + "NODE_INITIATED_DISCOVERY_SET_TRAP_DESTINATION": + 
{"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "NODE_INITIATED_DISCOVERY_SET_TRAP_DESTINATION", + "DefaultValue": "false", + "Value": "true", + "DataType": "java.lang.Boolean", + "GroupName": ""}, + "DEVICE_PREFERRED_NAME": + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "DEVICE_PREFERRED_NAME", + "DefaultValue": "HOST_NAME", + "Value": "PREFER_DNS,PREFER_IDRAC_HOSTNAME", + "DataType": "java.lang.String", + "GroupName": "DISCOVERY_SETTING"}, + "INVALID_DEVICE_HOSTNAME": + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "INVALID_DEVICE_HOSTNAME", + "DefaultValue": "", + "Value": "localhost", + "DataType": "java.lang.String", + "GroupName": "DISCOVERY_SETTING"}, + "COMMON_MAC_ADDRESSES": + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "COMMON_MAC_ADDRESSES", + "DefaultValue": "", + "Value": "::", + "DataType": "java.lang.String", + "GroupName": "DISCOVERY_SETTING"}, + "MIN_PROTOCOL_VERSION": { + "@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "MIN_PROTOCOL_VERSION", + "DefaultValue": "V2", + "Value": "V2", + "DataType": "java.lang.String", + "GroupName": "CIFS_PROTOCOL_SETTINGS"}, + "CONSOLE_CONNECTION_SETTING": { + "@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "CONSOLE_CONNECTION_SETTING", + "DefaultValue": "last_known", + "Value": "last_known", + "DataType": "java.lang.String", + "GroupName": "CONSOLE_CONNECTION_SETTING"}, + "SHARE_TYPE": { + "@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "SHARE_TYPE", + "DefaultValue": "CIFS", + "Value": "CIFS", + "DataType": "java.lang.String", + "GroupName": "BUILT_IN_APPLIANCE_SHARE_SETTINGS"}}, + "payload": + {"ConsoleSetting": + [ + { + "@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "DATA_PURGE_INTERVAL", + "DefaultValue": "365", + "Value": "361", + "DataType": "java.lang.Integer", + "GroupName": "" + }]}, + "cifs_payload": + {"ConsoleSetting": []}, + "module_args": {"metrics_collection_settings": 300}, + "json_data": {"value": [{"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "DATA_PURGE_INTERVAL", + "DefaultValue": "365", + "Value": "361", + "DataType": "java.lang.Integer", + "GroupName": "" + }, + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "TRAP_FORWARDING_SETTING", + "DefaultValue": "AsIs", + "Value": "Normalized", + "DataType": "java.lang.String", + "GroupName": "" + }, + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "MX7000_ONBOARDING_PREF", + "DefaultValue": "all", + "Value": "all", + "DataType": "java.lang.String", + "GroupName": "" + }, + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "REPORTS_MAX_RESULTS_LIMIT", + "DefaultValue": "0", + "Value": "123", + "DataType": "java.lang.Integer", + "GroupName": "" + }, + {"@odata.type": "#ApplicationService.ConsoleSetting", + "Name": "EMAIL_SENDER", + "DefaultValue": "omcadmin@dell.com", + "Value": "admin@dell.com", + "DataType": "java.lang.String", + "GroupName": "" + }, ]}, }]) + def test_module_success(self, mocker, params, ome_connection_mock_for_application_console_preferences, + ome_response_mock, ome_default_args): + ome_response_mock.success = True + ome_response_mock.status_code = 201 + # ome_response_mock.json_data = params["json_data"] + ome_default_args.update(params['module_args']) + mocker.patch(MODULE_PATH + 'job_details', return_value=params["job_details"]) + mocker.patch(MODULE_PATH + 'create_job', return_value=(None, None)) + mocker.patch(MODULE_PATH + 'fetch_cp_settings', 
return_value=params["cp_data"]["value"]) + mocker.patch(MODULE_PATH + 'create_payload', return_value=(params["payload"], params["payload_dict"])) + mocker.patch(MODULE_PATH + 'create_cifs_payload', return_value=params["cifs_payload"]) + mocker.patch(MODULE_PATH + '_diff_payload', return_value=1) + # mocker.patch(MODULE_PATH + 'update_payload', return_value=update_json_data) + # mocker.patch(MODULE_PATH + '_diff_payload', return_value=1) + result = self._run_module(ome_default_args) + assert result["msg"] == SUCCESS_MSG + + @pytest.mark.parametrize("exc_type", [HTTPError, URLError]) + def test_cp_main_exception_case(self, mocker, exc_type, ome_connection_mock_for_application_console_preferences, + ome_response_mock, ome_default_args): + ome_default_args.update({"device_health": {"health_check_interval": 65, + "health_check_interval_unit": "Minutes"}}) + ome_response_mock.status_code = 400 + ome_response_mock.success = False + json_str = to_text(json.dumps({"info": "error_details"})) + if exc_type == URLError: + mocker.patch(MODULE_PATH + '_validate_params', side_effect=exc_type("url open error")) + result = self._run_module(ome_default_args) + assert result["unreachable"] is True + elif exc_type not in [HTTPError, SSLValidationError]: + mocker.patch(MODULE_PATH + '_validate_params', side_effect=exc_type("exception message")) + result = self._run_module_with_fail_json(ome_default_args) + assert result['failed'] is True + else: + mocker.patch(MODULE_PATH + '_validate_params', + side_effect=exc_type('http://testhost.com', 400, 'http error message', + {"accept-type": "application/json"}, StringIO(json_str))) + result = self._run_module_with_fail_json(ome_default_args) + assert result['failed'] is True + assert 'msg' in result diff --git a/tests/unit/plugins/modules/test_ome_device_local_access_configuration.py b/tests/unit/plugins/modules/test_ome_device_local_access_configuration.py index 79ec6bc52..23bae781c 100644 --- a/tests/unit/plugins/modules/test_ome_device_local_access_configuration.py +++ b/tests/unit/plugins/modules/test_ome_device_local_access_configuration.py @@ -2,8 +2,8 @@ # # Dell EMC OpenManage Ansible Modules -# Version 4.4.0 -# Copyright (C) 2021 Dell Inc. or its subsidiaries. All Rights Reserved. +# Version 5.2.0 +# Copyright (C) 2021-2022 Dell Inc. or its subsidiaries. All Rights Reserved. # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) # @@ -21,7 +21,7 @@ from ansible.module_utils._text import to_text from ansible_collections.dellemc.openmanage.plugins.modules import ome_device_local_access_configuration from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants -from ansible_collections.dellemc.openmanage.tests.unit.compat.mock import MagicMock, patch, Mock +from mock import MagicMock, patch, Mock MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.ome_device_local_access_configuration.' diff --git a/tests/unit/plugins/modules/test_ome_device_network_services.py b/tests/unit/plugins/modules/test_ome_device_network_services.py index 5456f5d32..0a68ac9d4 100644 --- a/tests/unit/plugins/modules/test_ome_device_network_services.py +++ b/tests/unit/plugins/modules/test_ome_device_network_services.py @@ -2,8 +2,8 @@ # # Dell EMC OpenManage Ansible Modules -# Version 4.3.0 -# Copyright (C) 2021 Dell Inc. or its subsidiaries. All Rights Reserved. +# Version 5.2.0 +# Copyright (C) 2021-2022 Dell Inc. or its subsidiaries. All Rights Reserved. 
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) # @@ -23,7 +23,7 @@ from ansible.module_utils._text import to_text from ansible_collections.dellemc.openmanage.plugins.modules import ome_device_network_services from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants -from ansible_collections.dellemc.openmanage.tests.unit.compat.mock import MagicMock, patch, Mock +from mock import MagicMock, patch, Mock MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.ome_device_network_services.' diff --git a/tests/unit/plugins/modules/test_ome_device_power_settings.py b/tests/unit/plugins/modules/test_ome_device_power_settings.py index d08a9ee31..928c407c3 100644 --- a/tests/unit/plugins/modules/test_ome_device_power_settings.py +++ b/tests/unit/plugins/modules/test_ome_device_power_settings.py @@ -2,8 +2,8 @@ # # Dell EMC OpenManage Ansible Modules -# Version 4.3.0 -# Copyright (C) 2021 Dell Inc. or its subsidiaries. All Rights Reserved. +# Version 5.2.0 +# Copyright (C) 2021-2022 Dell Inc. or its subsidiaries. All Rights Reserved. # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) # @@ -21,7 +21,7 @@ from ansible.module_utils._text import to_text from ansible_collections.dellemc.openmanage.plugins.modules import ome_device_power_settings from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants -from ansible_collections.dellemc.openmanage.tests.unit.compat.mock import MagicMock, patch, Mock +from mock import MagicMock, patch, Mock MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.ome_device_power_settings.' diff --git a/tests/unit/plugins/modules/test_ome_diagnostics.py b/tests/unit/plugins/modules/test_ome_diagnostics.py index 69e66b572..1d6378055 100644 --- a/tests/unit/plugins/modules/test_ome_diagnostics.py +++ b/tests/unit/plugins/modules/test_ome_diagnostics.py @@ -2,8 +2,8 @@ # # Dell EMC OpenManage Ansible Modules -# Version 3.6.0 -# Copyright (C) 2021 Dell Inc. or its subsidiaries. All Rights Reserved. +# Version 5.2.0 +# Copyright (C) 2021-2022 Dell Inc. or its subsidiaries. All Rights Reserved. # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) # @@ -53,6 +53,13 @@ def test_group_validation(self, ome_conn_mock_diagnostics, ome_response_mock, om result = self.module.group_validation(f_module, ome_conn_mock_diagnostics) assert result == [25011] + def test_group_validation_s1(self, ome_conn_mock_diagnostics, ome_response_mock, ome_default_args, mocker): + f_module = self.get_module_mock(params={"device_group_name": "Servers"}) + ome_response_mock.json_data = {"value": [{"Type": 2000, "Id": 10161}]} + with pytest.raises(Exception) as err: + self.module.group_validation(f_module, ome_conn_mock_diagnostics) + assert err.value.args[0] == "The requested group 'Servers' does not contain devices that support export log." 
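# Editorial note: the group_validation tests above expect a list of device IDs for a
# group whose members support export log, and the failure message asserted in
# test_group_validation_s1 otherwise. The snippet below is only a hedged sketch of
# that behaviour as implied by the test data (Type 2000 appears to be treated as not
# export-capable, Type 1000 as capable); the real module may differ, it fails the
# task via fail_json rather than raising, and the helper name is hypothetical.
EXPORT_CAPABLE_TYPE = 1000
GROUP_NO_DEVICE_MSG = "The requested group '{0}' does not contain devices that support export log."


def sketch_group_validation(module_params, group_devices):
    """Return IDs of export-capable devices in the group, or raise if there are none."""
    capable_ids = [dev["Id"] for dev in group_devices
                   if dev.get("Type") == EXPORT_CAPABLE_TYPE]
    if not capable_ids:
        raise ValueError(GROUP_NO_DEVICE_MSG.format(module_params["device_group_name"]))
    return capable_ids


# Mirrors the assertions above: a group containing only {"Type": 2000, "Id": 10161}
# produces the error message checked in test_group_validation_s1, while a group with
# export-capable members yields their IDs (e.g. [25011]).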
+ def test_device_validation(self, ome_conn_mock_diagnostics, ome_response_mock, ome_default_args, mocker): resp = {"report_list": [{"Id": 25014, "DeviceServiceTag": "ZXCVB1", "Type": 1000}]} f_module = self.get_module_mock(params={"device_ids": [25011]}) @@ -79,9 +86,17 @@ def test_extract_log_operation(self, ome_conn_mock_diagnostics, ome_response_moc f_module = self.get_module_mock(params={"log_type": "application", "share_address": "192.168.0.1", "share_type": "NFS", "share_name": "iso", "share_user": "username", "share_password": "password", "share_domain": "domain", - "mask_sensitive_info": "true", "log_selectors": ["OS_LOGS"]}) + "mask_sensitive_info": "true", "log_selectors": ["OS_LOGS"], + "lead_chassis_only": "true"}) ome_response_mock.json_data = {"value": [{"Id": 16011, "Type": 2000}]} ome_conn_mock_diagnostics.job_submission.return_value = {"Id": 16011} + ome_conn_mock_diagnostics.get_all_items_with_pagination.return_value = \ + {"value": [{"DomainRoleTypeValue": "LEAD", "DeviceId": 16011}]} + result = self.module.extract_log_operation(f_module, ome_conn_mock_diagnostics) + assert result["Id"] == 16011 + + ome_conn_mock_diagnostics.get_all_items_with_pagination.return_value = \ + {"value": [{"DomainRoleTypeValue": "STANDALONE", "DeviceId": 16011}]} result = self.module.extract_log_operation(f_module, ome_conn_mock_diagnostics) assert result["Id"] == 16011 @@ -92,6 +107,41 @@ def test_extract_log_operation(self, ome_conn_mock_diagnostics, ome_response_moc result = self.module.extract_log_operation(f_module, ome_conn_mock_diagnostics, device_lst=[25012]) assert result["Id"] == 16011 + def test_extract_log_operation_member(self, ome_conn_mock_diagnostics, ome_response_mock, ome_default_args, mocker): + f_module = self.get_module_mock(params={"log_type": "application", "share_address": "192.168.0.1", + "share_type": "NFS", "share_name": "iso", "share_user": "username", + "share_password": "password", "share_domain": "domain", + "mask_sensitive_info": "true", "log_selectors": ["OS_LOGS"], + "lead_chassis_only": "true"}) + ome_response_mock.json_data = {"value": [{"Id": 16011, "Type": 2000}]} + ome_conn_mock_diagnostics.job_submission.return_value = {"Id": 16011} + ome_conn_mock_diagnostics.get_all_items_with_pagination.return_value = \ + {"value": [{"DomainRoleTypeValue": "MEMBER", "DeviceId": 16011}]} + with pytest.raises(Exception) as err: + self.module.extract_log_operation(f_module, ome_conn_mock_diagnostics) + assert err.value.args[0] == "There is no device(s) available to export application log." 
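# Editorial note: the LEAD/STANDALONE/MEMBER cases exercised here suggest that, when
# lead_chassis_only is set for an application log export, the module reads the
# management domains and keeps only LEAD or STANDALONE chassis as targets. The sketch
# below is a hedged illustration of that selection, not the module's actual
# implementation; `domains` stands in for the "value" list returned by
# connection.get_all_items_with_pagination on the domains URI, and the function name
# is hypothetical.
APPLICATION_NO_DEVICE_MSG = "There is no device(s) available to export application log."


def sketch_lead_chassis_targets(domains, lead_chassis_only=True):
    """Return chassis device IDs eligible for an application log export."""
    if not lead_chassis_only:
        return [dom["DeviceId"] for dom in domains]
    eligible = [dom["DeviceId"] for dom in domains
                if dom.get("DomainRoleTypeValue") in ("LEAD", "STANDALONE")]
    if not eligible:
        # The module itself fails the task with this message instead of raising.
        raise ValueError(APPLICATION_NO_DEVICE_MSG)
    return eligible


# e.g. [{"DomainRoleTypeValue": "LEAD", "DeviceId": 16011}] -> [16011], while a
# MEMBER-only list raises with the message asserted in test_extract_log_operation_member.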
+ + def test_extract_log_operation_no_lead_chassis(self, ome_conn_mock_diagnostics, ome_response_mock, ome_default_args, mocker): + f_module = self.get_module_mock(params={"lead_chassis_only": False, "log_type": "application", + "share_address": "192.168.0.1", + "share_type": "NFS", "share_name": "iso", "share_user": "username", + "share_password": "password", "share_domain": "domain", + "mask_sensitive_info": "true", "log_selectors": ["OS_LOGS"], }) + ome_response_mock.json_data = {"value": [{"Id": 16011, "Type": 2000}]} + ome_conn_mock_diagnostics.job_submission.return_value = {"Id": 16011} + result = self.module.extract_log_operation(f_module, ome_conn_mock_diagnostics) + assert result["Id"] == 16011 + + def test_extract_log_operation_s1(self, ome_conn_mock_diagnostics, ome_response_mock, ome_default_args, mocker): + f_module = self.get_module_mock(params={"lead_chassis_only": False, "log_type": "application", + "share_address": "192.168.0.1", + "share_type": "NFS", + "mask_sensitive_info": "true", "log_selectors": ["OS_LOGS"], }) + ome_response_mock.json_data = {"value": [{"Id": 16011, "Type": 2000}]} + ome_conn_mock_diagnostics.job_submission.return_value = {"Id": 16011} + result = self.module.extract_log_operation(f_module, ome_conn_mock_diagnostics) + assert result["Id"] == 16011 + def test_main_succes_case(self, ome_conn_mock_diagnostics, ome_response_mock, ome_default_args, mocker): ome_default_args.update({"log_type": "support_assist_collection", "share_address": "192.168.0.1", "share_type": "NFS", "share_name": "iso", "share_user": "username", @@ -152,3 +202,20 @@ def test_find_failed_jobs(self, ome_conn_mock_diagnostics, ome_response_mock, om result = self.module.find_failed_jobs({"Id": 25012}, ome_conn_mock_diagnostics) assert result[0] == "Export log job completed with errors." assert result[1] is False + + ome_response_mock.json_data = { + "Id": 25011, + "value": [] + } + result = self.module.find_failed_jobs({"Id": 25012}, ome_conn_mock_diagnostics) + assert result[0] == "Export log job completed with errors." + assert result[1] is False + + ome_response_mock.json_data = { + "Id": 25011, + "value": [{"Id": 25013, "Value": "Job status for JID_255809594125 is Completed."}] + } + result = self.module.find_failed_jobs({"Id": 25012}, ome_conn_mock_diagnostics) + print(result) + assert result[0] == "Export log job completed with errors." + assert result[1] is True diff --git a/tests/unit/plugins/modules/test_ome_firmware.py b/tests/unit/plugins/modules/test_ome_firmware.py index 83ea2fa91..082b82934 100644 --- a/tests/unit/plugins/modules/test_ome_firmware.py +++ b/tests/unit/plugins/modules/test_ome_firmware.py @@ -2,8 +2,8 @@ # # Dell EMC OpenManage Ansible Modules -# Version 4.1.0 -# Copyright (C) 2019-2020 Dell Inc. or its subsidiaries. All Rights Reserved. +# Version 5.2.0 +# Copyright (C) 2019-2022 Dell Inc. or its subsidiaries. All Rights Reserved. 
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) # @@ -12,7 +12,7 @@ __metaclass__ = type -from ansible_collections.dellemc.openmanage.tests.unit.compat.mock import patch, mock_open +from mock import patch, mock_open import pytest import json diff --git a/tests/unit/plugins/modules/test_ome_profile.py b/tests/unit/plugins/modules/test_ome_profile.py index cc976c759..91f7fc1b5 100644 --- a/tests/unit/plugins/modules/test_ome_profile.py +++ b/tests/unit/plugins/modules/test_ome_profile.py @@ -2,7 +2,7 @@ # # Dell EMC OpenManage Ansible Modules -# Version 3.1.0 +# Version 5.2.0 # Copyright (C) 2021 Dell Inc. or its subsidiaries. All Rights Reserved. # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) @@ -13,16 +13,19 @@ __metaclass__ = type import json -import pytest -from ssl import SSLError from io import StringIO +from ssl import SSLError + +import pytest +from ansible.module_utils._text import to_text from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError from ansible.module_utils.urls import ConnectionError, SSLValidationError -from ansible.module_utils._text import to_text from ansible_collections.dellemc.openmanage.plugins.modules import ome_profile -from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants +from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.ome_profile.' +CHANGES_MSG = "Changes found to be applied." +NO_CHANGES_MSG = "No changes found to be applied." @pytest.fixture @@ -64,6 +67,142 @@ def test_get_target_details(self, params, ome_connection_mock_for_profile, ome_r result = self.module.get_target_details(f_module, ome_connection_mock_for_profile) assert result == params["res"] + @pytest.mark.parametrize("params", + [{"mparams": { + "attributes": { + "Attributes": [ + { + "Id": 93812, + "IsIgnored": False, + "Value": "Aisle Five" + }, + { + "DisplayName": 'System, Server Topology, ServerTopology 1 Aisle Name', + "IsIgnored": False, + "Value": "Aisle 5" + } + ] + }}, "success": True, + "json_data": { + "Id": 11, + "Name": "ProfileViewEditAttributes", + "AttributeGroupNames": [], + "AttributeGroups": [ + { + "GroupNameId": 5, + "DisplayName": "System", + "SubAttributeGroups": [ + { + "GroupNameId": 33016, + "DisplayName": "Server Operating System", + "SubAttributeGroups": [], + "Attributes": [ + { + "AttributeId": 93820, + "DisplayName": "ServerOS 1 Server Host Name", + "Description": None, + "Value": None, + "IsReadOnly": False, + "IsIgnored": True, + } + ] + }, + { + "GroupNameId": 33019, + "DisplayName": "Server Topology", + "SubAttributeGroups": [], + "Attributes": [ + { + "AttributeId": 93812, + "CustomId": 0, + "AttributeEditInfoId": 2248, + "DisplayName": "ServerTopology 1 Aisle Name", + "Description": None, + "Value": "Aisle 5", + "IsReadOnly": False, + "IsIgnored": True, + }, + { + "AttributeId": 93811, + "DisplayName": "ServerTopology 1 Data Center Name", + "Value": "BLG 2nd Floor DS 1", + "IsReadOnly": False, + "IsIgnored": True, + }, + { + "AttributeId": 93813, + "DisplayName": "ServerTopology 1 Rack Name", + "Description": None, + "Value": None, + "IsReadOnly": False, + "IsIgnored": True, + }, + { + "AttributeId": 93814, + "DisplayName": "ServerTopology 1 Rack Slot", + "Description": None, + "Value": None, + "IsReadOnly": False, + "IsIgnored": True, + } + ] + } + ], + 
"Attributes": [] + }, + { + "GroupNameId": 9, + "DisplayName": "iDRAC", + "SubAttributeGroups": [ + { + "GroupNameId": 32688, + "DisplayName": "Active Directory", + "SubAttributeGroups": [], + "Attributes": [ + { + "AttributeId": 93523, + "DisplayName": "ActiveDirectory 1 Active Directory RAC Name", + "Description": None, + "Value": None, + "IsReadOnly": False, + "IsIgnored": True, + } + ] + }, + { + "GroupNameId": 32930, + "DisplayName": "NIC Information", + "SubAttributeGroups": [], + "Attributes": [ + { + "AttributeId": 93035, + "DisplayName": "NIC 1 DNS RAC Name", + "Description": None, + "Value": None, + "IsReadOnly": False, + "IsIgnored": True, + }, + { + "AttributeId": 92510, + "DisplayName": "NIC 1 Enable VLAN", + "Description": None, + "Value": "Disabled", + "IsReadOnly": False, + "IsIgnored": False, + } + ] + } + ], + "Attributes": []}]}, + "diff": 2}]) + def test_attributes_check(self, params, ome_connection_mock_for_profile, ome_response_mock): + ome_response_mock.success = params.get("success", True) + ome_response_mock.json_data = params["json_data"] + f_module = self.get_module_mock(params=params["mparams"]) + result = self.module.attributes_check(f_module, ome_connection_mock_for_profile, + params['mparams']['attributes'], 123) + assert result == params["diff"] + @pytest.mark.parametrize("params", [{"mparams": {"command": 'create'}, "func": "create_profile"}, {"mparams": {"command": 'modify'}, "func": "modify_profile"}, {"mparams": {"command": 'delete'}, "func": "delete_profile"}, @@ -111,19 +250,30 @@ def test_get_profile(self, params, ome_connection_mock_for_profile, ome_response }, "success": True, "json_data": [1], - "res": "Successfully created 1 profile(s)."}]) + "res": "Successfully created 1 profile(s)."}, + { + "mparams": + { + "command": "create", + "template_name": "t1", + "name_prefix": "profile", + "number_of_profiles": 1 + }, + "success": True, "check_mode": True, "json_data": [1], "res": CHANGES_MSG} + ]) def test_create_profile(self, mocker, params, ome_connection_mock_for_profile, ome_response_mock): ome_response_mock.success = params.get("success", True) ome_response_mock.json_data = params["json_data"] mocker.patch(MODULE_PATH + 'get_template_details', return_value={"Id": 12}) - f_module = self.get_module_mock(params=params["mparams"]) + f_module = self.get_module_mock(params=params["mparams"], check_mode=params.get('check_mode', False)) error_message = params["res"] with pytest.raises(Exception) as err: self.module.create_profile(f_module, ome_connection_mock_for_profile) assert err.value.args[0] == error_message @pytest.mark.parametrize("params", [ - {"mparams": {"command": "modify", "name": "profile"}, "success": True, + {"mparams": {"command": "modify", "name": "profile"}, + "success": True, "prof": {}, "json_data": 0, "res": "Profile with the name 'profile' not found."}, {"mparams": {"command": "modify", "name": "profile", "new_name": "modified profile", @@ -135,23 +285,37 @@ def test_create_profile(self, mocker, params, ome_connection_mock_for_profile, o "IsIgnored": True}]}}, "success": True, "prof": {"Id": 1234, "ProfileName": "jrofile 00002", + "ProfileDescription": "from source template t1", + "NetworkBootToIso": {"BootToNetwork": True, "ShareType": "NFS", "IsoPath": "abcd.iso", + "ShareDetail": {"IpAddress": "XX.XX.XX.XX", "ShareName": "XX.XX.XX.XX", }, + "IsoTimeout": 4}, + "ProfileState": 0, }, + "json_data": 0, + "res": "Successfully modified the profile."}, + {"mparams": {"command": "modify", "name": "myprofile", "new_name": "myprofile"}, + 
"success": True, + "prof": {"Id": 1234, "ProfileName": "myprofile", "ProfileDescription": "my description"}, + "json_data": 0, "res": "No changes found to be applied."}, + {"mparams": {"command": "modify", "name": "profile", "new_name": "modified profile", + "description": "new description", + "boot_to_network_iso": {"boot_to_network": True, "share_type": "NFS", "share_ip": "192.168.0.1", + "iso_path": "path/to/my_iso.iso", "iso_timeout": 8}, + "attributes": {"Attributes": [{"Id": 4506, "Value": "server attr 1", + "IsIgnored": True}]}}, "success": True, + "prof": {"Id": 1234, "ProfileName": "jrofile 00002", "ProfileDescription": "from source template t1", "NetworkBootToIso": { "BootToNetwork": True, "ShareType": "NFS", "IsoPath": "abcd.iso", - "ShareDetail": { - "IpAddress": "XX.XX.XX.XX", "ShareName": "XX.XX.XX.XX", - }, "IsoTimeout": 4 - }, "ProfileState": 0, }, "json_data": 0, - "res": "Successfully modified the profile."}, - {"mparams": {"command": "modify", "name": "myprofile", "new_name": "myprofile", "description": "my description"}, - "success": True, - "prof": {"Id": 1234, "ProfileName": "myprofile", "ProfileDescription": "my description"}, - "json_data": 0, "res": "No changes found to be applied."}, ]) + "ShareDetail": {"IpAddress": "XX.XX.XX.XX", "ShareName": "XX.XX.XX.XX"}, "IsoTimeout": 4}, + "ProfileState": 0, }, + "json_data": 0, "attributes_check": 2, "check_mode": True, "res": CHANGES_MSG} + ]) def test_modify_profile(self, mocker, params, ome_connection_mock_for_profile, ome_response_mock): ome_response_mock.success = params.get("success", True) ome_response_mock.json_data = params["json_data"] mocker.patch(MODULE_PATH + 'get_profile', return_value=params.get('prof')) - f_module = self.get_module_mock(params=params["mparams"]) + mocker.patch(MODULE_PATH + 'attributes_check', return_value=params.get('attributes_check', 0)) + f_module = self.get_module_mock(params=params["mparams"], check_mode=params.get('check_mode', False)) error_message = params["res"] with pytest.raises(Exception) as err: self.module.modify_profile(f_module, ome_connection_mock_for_profile) @@ -164,18 +328,24 @@ def test_modify_profile(self, mocker, params, ome_connection_mock_for_profile, o {"mparams": {"command": "delete", "name": "profile"}, "success": True, "prof": {"Id": 12, "ProfileState": 0}, "json_data": 0, "res": "Successfully deleted the profile."}, + {"mparams": {"command": "delete", "name": "profile"}, "success": True, + "prof": {"Id": 12, "ProfileState": 0}, "json_data": 0, "check_mode": True, + "res": CHANGES_MSG}, {"mparams": {"command": "delete", "name": "profile"}, "success": True, "prof": {}, "json_data": 0, "res": "Profile with the name 'profile' not found."}, {"mparams": {"command": "delete", "filters": "profile"}, "success": True, "prof": {"Id": 12, "ProfileState": 0}, "json_data": 0, "res": "Successfully completed the delete operation."}, + {"mparams": {"command": "delete", "filters": "profile"}, "success": True, + "prof": {"Id": 12, "ProfileState": 0}, "json_data": 0, "check_mode": True, + "res": CHANGES_MSG}, ]) def test_delete_profile(self, mocker, params, ome_connection_mock_for_profile, ome_response_mock): ome_response_mock.success = params.get("success", True) ome_response_mock.json_data = params["json_data"] mocker.patch(MODULE_PATH + 'get_profile', return_value=params.get('prof')) - f_module = self.get_module_mock(params=params["mparams"]) + f_module = self.get_module_mock(params=params["mparams"], check_mode=params.get('check_mode', False)) error_message = params["res"] 
with pytest.raises(Exception) as err: self.module.delete_profile(f_module, ome_connection_mock_for_profile) @@ -196,7 +366,8 @@ def test_delete_profile(self, mocker, params, ome_connection_mock_for_profile, o "boot_to_network_iso": {"boot_to_network": True, "share_type": "NFS", "share_ip": "192.168.0.1", "iso_path": "path/to/my_iso.iso", "iso_timeout": 8}, - "attributes": {"Attributes": [{"Id": 4506, "Value": "server attr 1", "IsIgnored": True}]}}, "success": True, + "attributes": {"Attributes": [{"Id": 4506, "Value": "server attr 1", "IsIgnored": True}]}}, + "success": True, "prof": {"Id": 123, "ProfileState": 0}, "target": {"Id": 234, "Name": "mytarget"}, "json_data": [23, 123], "res": "Successfully applied the assign operation."}, {"mparams": {"command": "assign", "name": "profile", "device_service_tag": "ABCDEFG", @@ -206,13 +377,67 @@ def test_delete_profile(self, mocker, params, ome_connection_mock_for_profile, o "attributes": {"Attributes": [{"Id": 4506, "Value": "server attr 1", "IsIgnored": True}]}}, "success": True, "prof": {"Id": 123, "ProfileState": 0}, "target": {"Id": 234, "Name": "mytarget"}, "json_data": [23, 123], "res": "Successfully applied the assign operation."}, + {"mparams": {"command": "assign", "name": "profile", "device_id": 234, + "boot_to_network_iso": {"boot_to_network": True, "share_type": "NFS", "share_ip": "192.168.0.1", + "iso_path": "path/to/my_iso.iso", + "iso_timeout": 8}, + "attributes": {"Attributes": [{"Id": 4506, "Value": "server attr 1", "IsIgnored": True}]}}, + "success": True, + "prof": {"Id": 123, "ProfileState": 4, "TargetId": 234}, "target": {"Id": 234, "Name": "mytarget"}, + "json_data": [23, 123], + "res": "The profile is assigned to the target 234."}, + {"mparams": {"command": "assign", "name": "profile", "device_id": 234, + "boot_to_network_iso": {"boot_to_network": True, "share_type": "NFS", "share_ip": "192.168.0.1", + "iso_path": "path/to/my_iso.iso", + "iso_timeout": 8}, + "attributes": {"Attributes": [{"Id": 4506, "Value": "server attr 1", "IsIgnored": True}]}}, + "success": True, + "prof": {"Id": 123, "ProfileState": 4, "TargetId": 235}, "target": {"Id": 234, "Name": "mytarget"}, + "json_data": [23, 123], + "res": "The profile is assigned to a different target. 
Use the migrate command or unassign the profile and " + "then proceed with assigning the profile to the target."}, + {"mparams": {"command": "assign", "name": "profile", "device_service_tag": "STG1234", + "boot_to_network_iso": {"boot_to_network": True, "share_type": "NFS", "share_ip": "192.168.0.1", + "iso_path": "path/to/my_iso.iso", + "iso_timeout": 8}, + "attributes": {"Attributes": [{"Id": 4506, "Value": "server attr 1", "IsIgnored": True}]}}, + "success": True, + "prof": {"Id": 123, "ProfileState": 1, "TargetId": 235, "TargetName": "STG1234"}, "target": "Target invalid.", + "json_data": [23, 123], + "res": "The profile is assigned to the target STG1234."}, + {"mparams": {"command": "assign", "name": "profile", "device_id": 123, + "boot_to_network_iso": {"boot_to_network": True, "share_type": "NFS", "share_ip": "192.168.0.1", + "iso_path": "path/to/my_iso.iso", + "iso_timeout": 8}, + "attributes": {"Attributes": [{"Id": 4506, "Value": "server attr 1", "IsIgnored": True}]}}, + "success": True, + "prof": {"Id": 123, "ProfileState": 1, "TargetId": 235, "TargetName": "STG1234"}, "target": "Target invalid.", + "json_data": [23, 123], + "res": "Target invalid."}, + {"mparams": {"command": "assign", "name": "profile", "device_id": 234, + "boot_to_network_iso": {"boot_to_network": True, "share_type": "NFS", "share_ip": "192.168.0.1", + "iso_path": "path/to/my_iso.iso", + "iso_timeout": 8}, + "attributes": {"Attributes": [{"Id": 4506, "Value": "server attr 1", "IsIgnored": True}]}}, + "success": True, "check_mode": True, + "prof": {"Id": 123, "ProfileState": 0}, "target": {"Id": 234, "Name": "mytarget"}, "json_data": [23, 123], + "res": CHANGES_MSG}, + {"mparams": {"command": "assign", "name": "profile", "device_id": 234, + "boot_to_network_iso": {"boot_to_network": True, "share_type": "NFS", "share_ip": "192.168.0.1", + "iso_path": "path/to/my_iso.iso", + "iso_timeout": 8}, + "attributes": {"Attributes": [{"Id": 4506, "Value": "server attr 1", "IsIgnored": True}]}}, + "success": True, + "prof": {"Id": 123, "ProfileState": 0, "DeploymentTaskId": 12}, "target": {"Id": 234, "Name": "mytarget"}, + "json_data": [23, 123], + "res": "Successfully triggered the job for the assign operation."}, ]) def test_assign_profile(self, mocker, params, ome_connection_mock_for_profile, ome_response_mock): ome_response_mock.success = params.get("success", True) ome_response_mock.json_data = params["json_data"] mocker.patch(MODULE_PATH + 'get_profile', return_value=params.get('prof')) mocker.patch(MODULE_PATH + 'get_target_details', return_value=params.get('target')) - f_module = self.get_module_mock(params=params["mparams"]) + f_module = self.get_module_mock(params=params["mparams"], check_mode=params.get('check_mode', False)) error_message = params["res"] mocker.patch(MODULE_PATH + 'time.sleep', return_value=None) with pytest.raises(Exception) as err: @@ -231,12 +456,25 @@ def test_assign_profile(self, mocker, params, ome_connection_mock_for_profile, o "json_data": 0, "res": "Successfully applied the unassign operation. No job was triggered."}, {"mparams": {"command": "unassign", "filters": "profile"}, "success": True, "json_data": 0, "prof": {"Id": 12, "ProfileState": 1}, - "res": "Successfully applied the unassign operation. No job was triggered."}, ]) + "res": "Successfully applied the unassign operation. 
No job was triggered."}, + {"mparams": {"command": "unassign", "name": "profile"}, "success": True, + "prof": {"Id": 12, "ProfileState": 4, "DeploymentTaskId": 123}, + "json_data": {"LastRunStatus": {"Name": "Running"}}, + "res": "Profile deployment task is in progress. Wait for the job to finish."}, + {"mparams": {"command": "unassign", "name": "profile"}, "success": True, + "prof": {"Id": 12, "ProfileState": 4, "DeploymentTaskId": 123}, + "json_data": {"LastRunStatus": {"Name": "Starting"}}, + "res": "Successfully triggered a job for the unassign operation."}, + {"mparams": {"command": "unassign", "name": "profile"}, "success": True, + "prof": {"Id": 12, "ProfileState": 4, "DeploymentTaskId": 123}, + "json_data": {"LastRunStatus": {"Name": "Starting"}}, "check_mode": True, + "res": CHANGES_MSG} + ]) def test_unassign_profile(self, mocker, params, ome_connection_mock_for_profile, ome_response_mock): ome_response_mock.success = params.get("success", True) ome_response_mock.json_data = params["json_data"] mocker.patch(MODULE_PATH + 'get_profile', return_value=params.get('prof')) - f_module = self.get_module_mock(params=params["mparams"]) + f_module = self.get_module_mock(params=params["mparams"], check_mode=params.get('check_mode', False)) error_message = params["res"] mocker.patch(MODULE_PATH + 'time.sleep', return_value=None) with pytest.raises(Exception) as err: @@ -259,13 +497,25 @@ def test_unassign_profile(self, mocker, params, ome_connection_mock_for_profile, {"mparams": {"command": "migrate", "name": "profile"}, "success": True, "prof": {"Id": 12, "ProfileState": 4, "TargetId": 12}, "target": {"Id": 12}, "json_data": 0, "res": "No changes found to be applied."}, + {"mparams": {"command": "migrate", "name": "profile"}, "success": True, + "prof": {"Id": 12, "ProfileState": 4, "TargetId": 14, "DeploymentTaskId": 123}, + "target": "Target invalid.", + "json_data": [1, 2, 3], "res": "Target invalid."}, + {"mparams": {"command": "migrate", "name": "profile"}, "success": True, + "prof": {"Id": 12, "ProfileState": 4, "TargetId": 14, "DeploymentTaskId": 123}, + "target": {"Id": 12}, + "json_data": [12, 21, 13], "res": "The target device is invalid for the given profile."}, + {"mparams": {"command": "migrate", "name": "profile"}, "success": True, + "prof": {"Id": 12, "ProfileState": 4, "TargetId": 14, "DeploymentTaskId": 123}, + "target": {"Id": 12}, "check_mode": True, + "json_data": [1, 2, 3], "res": CHANGES_MSG}, ]) def test_migrate_profile(self, mocker, params, ome_connection_mock_for_profile, ome_response_mock): ome_response_mock.success = params.get("success", True) ome_response_mock.json_data = params["json_data"] mocker.patch(MODULE_PATH + 'get_profile', return_value=params.get('prof')) mocker.patch(MODULE_PATH + 'get_target_details', return_value=params.get('target')) - f_module = self.get_module_mock(params=params["mparams"]) + f_module = self.get_module_mock(params=params["mparams"], check_mode=params.get('check_mode', False)) error_message = params["res"] mocker.patch(MODULE_PATH + 'time.sleep', return_value=None) with pytest.raises(Exception) as err: diff --git a/tests/unit/plugins/modules/test_ome_template.py b/tests/unit/plugins/modules/test_ome_template.py index 9a7b748b4..27c84ffab 100644 --- a/tests/unit/plugins/modules/test_ome_template.py +++ b/tests/unit/plugins/modules/test_ome_template.py @@ -2,7 +2,7 @@ # # Dell EMC OpenManage Ansible Modules -# Version 3.4.0 +# Version 5.2.0 # Copyright (C) 2019-2020 Dell Inc. or its subsidiaries. All Rights Reserved. 
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) @@ -12,16 +12,16 @@ __metaclass__ = type -import pytest import json +from io import StringIO from ssl import SSLError -from ansible_collections.dellemc.openmanage.plugins.modules import ome_template + +import pytest +from ansible.module_utils._text import to_text from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError from ansible.module_utils.urls import ConnectionError, SSLValidationError -from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants, \ - AnsibleFailJSonException -from io import StringIO -from ansible.module_utils._text import to_text +from ansible_collections.dellemc.openmanage.plugins.modules import ome_template +from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.ome_template.' @@ -64,6 +64,139 @@ def test_get_device_ids_failure_case01(self, ome_connection_mock_for_template, o assert exc.value.args[0] == "Unable to complete the operation because the entered target device id(s) " \ "'{0}' are invalid.".format("#@!1") + @pytest.mark.parametrize("params", + [{"mparams": { + "attributes": { + "Attributes": [ + { + "Id": 93812, + "IsIgnored": False, + "Value": "Aisle Five" + }, + { + "DisplayName": 'System, Server Topology, ServerTopology 1 Aisle Name', + "IsIgnored": False, + "Value": "Aisle 5" + } + ] + }}, "success": True, + "json_data": { + "Id": 11, + "Name": "ProfileViewEditAttributes", + "AttributeGroupNames": [], + "AttributeGroups": [ + { + "GroupNameId": 5, + "DisplayName": "System", + "SubAttributeGroups": [ + { + "GroupNameId": 33016, + "DisplayName": "Server Operating System", + "SubAttributeGroups": [], + "Attributes": [ + { + "AttributeId": 93820, + "DisplayName": "ServerOS 1 Server Host Name", + "Value": None, + "IsReadOnly": False, + "IsIgnored": True, + } + ] + }, + { + "GroupNameId": 33019, + "DisplayName": "Server Topology", + "SubAttributeGroups": [], + "Attributes": [ + { + "AttributeId": 93812, + "DisplayName": "ServerTopology 1 Aisle Name", + "Value": "Aisle 5", + "IsReadOnly": False, + "IsIgnored": True, + }, + { + "AttributeId": 93811, + "DisplayName": "ServerTopology 1 Data Center Name", + "Description": None, + "Value": "BLG 2nd Floor DS 1", + "IsReadOnly": False, + "IsIgnored": True, + }, + { + "AttributeId": 93813, + "DisplayName": "ServerTopology 1 Rack Name", + "Description": None, + "Value": None, + "IsReadOnly": False, + "IsIgnored": True, + }, + { + "AttributeId": 93814, + "DisplayName": "ServerTopology 1 Rack Slot", + "Description": None, + "Value": None, + "IsReadOnly": False, + "IsIgnored": True, + } + ] + } + ], + "Attributes": [] + }, + { + "GroupNameId": 9, + "DisplayName": "iDRAC", + "SubAttributeGroups": [ + { + "GroupNameId": 32688, + "DisplayName": "Active Directory", + "SubAttributeGroups": [], + "Attributes": [ + { + "AttributeId": 93523, + "DisplayName": "ActiveDirectory 1 Active Directory RAC Name", + "Description": None, + "Value": None, + "IsReadOnly": False, + "IsIgnored": True, + } + ] + }, + { + "GroupNameId": 32930, + "DisplayName": "NIC Information", + "SubAttributeGroups": [], + "Attributes": [ + { + "AttributeId": 93035, + "DisplayName": "NIC 1 DNS RAC Name", + "Description": None, + "Value": None, + "IsReadOnly": False, + "IsIgnored": True, + }, + { + "AttributeId": 92510, + "DisplayName": "NIC 1 Enable VLAN", + 
"Description": None, + "Value": "Disabled", + "IsReadOnly": False, + "IsIgnored": False, + } + ] + } + ], + "Attributes": []}]}, + "diff": 2}]) + def test_attributes_check(self, params, ome_connection_mock_for_template, ome_response_mock): + ome_response_mock.success = params.get("success", True) + ome_response_mock.json_data = params["json_data"] + f_module = self.get_module_mock(params=params["mparams"]) + result = self.module.attributes_check(f_module, ome_connection_mock_for_template, + params['mparams']['attributes'], 123) + assert result == params["diff"] + def test_get_device_ids_failure_case_02(self, ome_connection_mock_for_template, ome_response_mock, ome_default_args): ome_connection_mock_for_template.get_all_report_details.return_value = { @@ -99,11 +232,13 @@ def test_get_view_id_success_case(self, ome_connection_mock_for_template, ome_re create_payload = {"Fqdds": "All", # Mandatory for create "ViewTypeId": 4, "attributes": {"Name": "create template name"}, "SourceDeviceId": 2224} - @pytest.mark.parametrize("param", [create_payload]) - def test_get_create_payload(self, param, ome_response_mock): + @pytest.mark.parametrize("param", [{"Fqdds": "All", # Mandatory for create + "ViewTypeId": 4, "attributes": {"Name": "create template name"}, + "SourceDeviceId": 2224}]) + def test_get_create_payload(self, param, ome_response_mock, ome_connection_mock_for_template): f_module = self.get_module_mock(params=param) - data = self.module.get_create_payload(f_module, 2224, 4) - assert data + data = self.module.get_create_payload(f_module, ome_connection_mock_for_template, 2224, 4) + assert data['Fqdds'] == "All" def test_get_template_by_id_success_case(self, ome_response_mock): ome_response_mock.json_data = {'value': []} @@ -120,8 +255,8 @@ def test_get_template_by_name_success_case(self, ome_response_mock, ome_connecti f_module = self.get_module_mock() data = self.module.get_template_by_name("test Sample Template import1", f_module, ome_connection_mock_for_template) - assert data[0]["Name"] == "test Sample Template import1" - assert data[0]["Id"] == 24 + assert data["Name"] == "test Sample Template import1" + assert data["Id"] == 24 def test_get_group_devices_all(self, ome_response_mock, ome_connection_mock_for_template): ome_response_mock.json_data = {'value': [{"Name": "Device1", "Id": 24}]} @@ -131,7 +266,7 @@ def test_get_group_devices_all(self, ome_response_mock, ome_connection_mock_for_ data = self.module.get_group_devices_all(ome_connection_mock_for_template, "uri") assert data == [{"Name": "Device1", "Id": 24}] - def test_get_template_by_name_fail_case(self, ome_response_mock): + def _test_get_template_by_name_fail_case(self, ome_response_mock): ome_response_mock.json_data = {'value': [{"Name": "template by name for template name", "Id": 12}]} ome_response_mock.status_code = 500 ome_response_mock.success = False @@ -199,18 +334,21 @@ def test__get_resource_parameters_modify_success_case(self, mocker, ome_response return_value={}) mocker.patch(MODULE_PATH + 'get_modify_payload', return_value={}) + mocker.patch(MODULE_PATH + 'get_template_details', return_value={"Id": 1234, "Name": "templ1"}) data = self.module._get_resource_parameters(f_module, ome_connection_mock_for_template) assert data == ('TemplateService/Templates(1234)', {}, 'PUT') def test__get_resource_parameters_delete_success_case(self, mocker, ome_response_mock, ome_connection_mock_for_template): f_module = self.get_module_mock({"command": "delete", "template_id": 1234}) + mocker.patch(MODULE_PATH + 
'get_template_details', return_value={"Id": 1234, "Name": "templ1"})
         data = self.module._get_resource_parameters(f_module, ome_connection_mock_for_template)
         assert data == ('TemplateService/Templates(1234)', {}, 'DELETE')

     def test__get_resource_parameters_export_success_case(self, mocker, ome_response_mock,
                                                           ome_connection_mock_for_template):
         f_module = self.get_module_mock({"command": "export", "template_id": 1234})
+        mocker.patch(MODULE_PATH + 'get_template_details', return_value={"Id": 1234, "Name": "templ1"})
         data = self.module._get_resource_parameters(f_module, ome_connection_mock_for_template)
         assert data == ('TemplateService/Actions/TemplateService.Export', {'TemplateId': 1234}, 'POST')

@@ -221,6 +359,7 @@ def test__get_resource_parameters_deploy_success_case(self, mocker, ome_response
                      return_value=[Constants.device_id1])
         mocker.patch(MODULE_PATH + 'get_deploy_payload',
                      return_value={"deploy_payload": "value"})
+        mocker.patch(MODULE_PATH + 'get_template_details', return_value={"Id": 1234, "Name": "templ1"})
         data = self.module._get_resource_parameters(f_module, ome_connection_mock_for_template)
         assert data == ('TemplateService/Actions/TemplateService.Deploy', {"deploy_payload": "value"}, 'POST')

@@ -231,6 +370,7 @@ def test__get_resource_parameters_clone_success_case(self, mocker, ome_response_
                      return_value=2)
         mocker.patch(MODULE_PATH + 'get_clone_payload',
                      return_value={"clone_payload": "value"})
+        mocker.patch(MODULE_PATH + 'get_template_details', return_value={"Id": 1234, "Name": "templ1"})
         data = self.module._get_resource_parameters(f_module, ome_connection_mock_for_template)
         assert data == ('TemplateService/Actions/TemplateService.Clone', {"clone_payload": "value"}, 'POST')

@@ -317,18 +457,6 @@ def test_main_template_success_case2(self, ome_default_args, mocker, module_mock
         assert result['changed'] is True
         assert result['msg'] == "Successfully created a template with ID {0}".format(ome_response_mock.json_data)

-    @pytest.mark.parametrize("param",
-                             [{"attr": {"attributes": {}}, "template_id": 1234,
-                               "template_dict": {"Name": "template1", "Description": "template description"}},
-                              {"attr": {"attributes": {"Name": "1", "Description": "d1"}}, "template_id": 1234,
-                               "template_dict": {"Name": "template1", "Description": "template description"}},
-                              {"attr": {"attributes": []}, "template_id": 1234,
-                               "template_dict": {"Name": "template1", "Description": "template description"}}
-                              ])
-    def test_get_modify_payload_success_case_01(self, param):
-        self.module.get_modify_payload(param["attr"], param["template_id"],
-                                       param["template_dict"])
-
     def test_get_import_payload_success_case_01(self, ome_connection_mock_for_template):
         f_module = self.get_module_mock(params={"attributes": {"Name": "template1", "Content": "Content"}})
         self.module.get_import_payload(f_module, ome_connection_mock_for_template, 2)
@@ -338,10 +466,15 @@ def test_get_deploy_payload_success_case_01(self):
         self.module.get_deploy_payload(module_params, [Constants.device_id1], 1234)

     @pytest.mark.parametrize("param",
-                             [{"attr": {"attributes": {"Name": "template1"}}, "name": "template1"}])
-    def test_get_clone_payload_success_case_01(self, param):
-        module_params = param["attr"]
-        self.module.get_clone_payload(module_params, 1234, 2)
+                             [{"mparams": {"attributes": {"Name": "template1"}}, "name": "template0",
+                               "template_id": 123,
+                               "clone_payload": {"SourceTemplateId": 123, "NewTemplateName": "template1",
+                                                 "ViewTypeId": 2}}])
+    def test_get_clone_payload_success_case_01(self, param, ome_connection_mock_for_template):
+        f_module
= self.get_module_mock(param["mparams"]) + module_params = param["mparams"] + payload = self.module.get_clone_payload(f_module, ome_connection_mock_for_template, param['template_id'], 2) + assert payload == param['clone_payload'] @pytest.mark.parametrize("param", [{"inp": {"command": "create", "template_name": "name", "device_id": [None], @@ -382,6 +515,66 @@ def test_get_group_details(self, param, ome_connection_mock_for_template, mocker dev_list = self.module.get_group_details(ome_connection_mock_for_template, f_module) assert dev_list == param["dev_list"] + @pytest.mark.parametrize("param", [ + {"inp": {"command": "deploy", "template_name": "name", + "device_group_names": ["mygroup"]}, + "group": {'Id': 23, "Name": "mygroup"}, + "dev_list": [1, 2, 3]}]) + def test_modify_payload(self, param, ome_connection_mock_for_template, mocker, + ome_response_mock): + f_module = self.get_module_mock(param["inp"]) + ome_response_mock.json_data = { + "value": [{'Id': 1, "Name": "mygroup3"}, {'Id': 2, "Name": "mygroup2"}, {'Id': 3, "Name": "mygroup"}]} + ome_response_mock.status_code = 200 + mocker.patch(MODULE_PATH + 'get_group_devices_all', return_value=[{'Id': 1}, {'Id': 2}, {'Id': 3}]) + dev_list = self.module.get_group_details(ome_connection_mock_for_template, f_module) + assert dev_list == param["dev_list"] + + @pytest.mark.parametrize("params", [ + {"mparams": {"command": "modify", "name": "profile", "attributes": { + "Attributes": [ + { + "Id": 93812, + "IsIgnored": False, + "Value": "Aisle Five" + }, + { + "DisplayName": 'System, Server Topology, ServerTopology 1 Aisle Name', + "IsIgnored": False, + "Value": "Aisle 5" + }]}}, + "success": True, "template": {"Name": "template_name", "Id": 123, "Description": "temp described"}, + "json_data": 0, "get_template_by_name": {"Name": "template1", "Id": 122, "Description": "temp described"}, + "res": "No changes found to be applied."}, + {"mparams": {"command": "modify", "name": "profile", "attributes": { + "Name": "new_name", + "Attributes": [ + { + "Id": 93812, + "IsIgnored": False, + "Value": "Aisle Five" + }, + { + "DisplayName": 'System, Server Topology, ServerTopology 1 Aisle Name', + "IsIgnored": False, + "Value": "Aisle 5" + }]}}, "success": True, + "template": {"Name": "template_name", "Id": 123, "Description": "temp described"}, "json_data": 0, + "get_template_by_name": {"Name": "template1", "Id": 122, "Description": "temp described"}, + "res": "Template with name 'new_name' already exists."} + ]) + def test_modify_payload(self, params, ome_connection_mock_for_template, mocker, + ome_response_mock): + ome_response_mock.success = params.get("success", True) + ome_response_mock.json_data = params["json_data"] + mocker.patch(MODULE_PATH + 'get_template_by_name', return_value=params.get('get_template_by_name')) + mocker.patch(MODULE_PATH + 'attributes_check', return_value=params.get('attributes_check', 0)) + f_module = self.get_module_mock(params=params["mparams"], check_mode=params.get('check_mode', False)) + error_message = params["res"] + with pytest.raises(Exception) as err: + self.module.get_modify_payload(f_module, ome_connection_mock_for_template, params.get('template')) + assert err.value.args[0] == error_message + @pytest.mark.parametrize("exc_type", [IOError, ValueError, TypeError, ConnectionError, HTTPError, URLError, SSLError]) diff --git a/tests/unit/plugins/modules/test_redfish_firmware.py b/tests/unit/plugins/modules/test_redfish_firmware.py index f16c59a91..0d160778d 100644 --- 
a/tests/unit/plugins/modules/test_redfish_firmware.py
+++ b/tests/unit/plugins/modules/test_redfish_firmware.py
@@ -2,8 +2,8 @@
 #
 # Dell EMC OpenManage Ansible Modules
-# Version 3.0.0
-# Copyright (C) 2020 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 5.2.0
+# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
 # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
 #

@@ -18,12 +18,12 @@
 import json

 from ansible_collections.dellemc.openmanage.plugins.modules import redfish_firmware
 from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
-from ansible_collections.dellemc.openmanage.tests.unit.compat.mock import MagicMock
+from mock import MagicMock
 from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
 from ansible.module_utils.urls import ConnectionError, SSLValidationError
 from io import StringIO
 from ansible.module_utils._text import to_text
-from ansible_collections.dellemc.openmanage.tests.unit.compat.mock import patch, mock_open
+from mock import patch, mock_open

 MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.'
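Taken together, the new test cases in this patch follow one check-mode/idempotency pattern: the module-layer function exits with "Changes found to be applied." when check mode detects a pending change and with "No changes found to be applied." when the requested state already matches, and the tests drive this through get_module_mock(..., check_mode=True) plus pytest.raises. The following is a minimal, self-contained sketch of that pattern only; FakeModule and rename_profile are invented names for illustration, not part of the collection.

# Minimal sketch of the check-mode/idempotency test pattern used above.
# FakeModule and rename_profile are illustrative stand-ins, not collection code.
import pytest

CHANGES_MSG = "Changes found to be applied."
NO_CHANGES_MSG = "No changes found to be applied."


class FakeModule:
    def __init__(self, params, check_mode=False):
        self.params = params
        self.check_mode = check_mode

    def exit_json(self, msg, **kwargs):
        # The unit tests catch module exits as exceptions, so raise here.
        raise Exception(msg)


def rename_profile(module, current_name):
    new_name = module.params["new_name"]
    if new_name == current_name:
        module.exit_json(msg=NO_CHANGES_MSG)      # idempotent: nothing to change
    if module.check_mode:
        module.exit_json(msg=CHANGES_MSG)         # check mode: report pending change only
    module.exit_json(msg="Successfully modified the profile.")


@pytest.mark.parametrize("check_mode, new_name, expected",
                         [(True, "renamed", CHANGES_MSG),
                          (False, "profile", NO_CHANGES_MSG),
                          (False, "renamed", "Successfully modified the profile.")])
def test_rename_profile(check_mode, new_name, expected):
    module = FakeModule({"new_name": new_name}, check_mode=check_mode)
    with pytest.raises(Exception) as err:
        rename_profile(module, current_name="profile")
    assert err.value.args[0] == expected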