diff --git a/plugins/modules/device_credential_workflow_manager.py b/plugins/modules/device_credential_workflow_manager.py index 163a31b293..50b2277f12 100644 --- a/plugins/modules/device_credential_workflow_manager.py +++ b/plugins/modules/device_credential_workflow_manager.py @@ -756,6 +756,35 @@ site_name: - Global/Vietnam/halong/Hanoi + - name: Delete credentials + cisco.dnac.device_credential_workflow_manager: + dnac_host: "{{ dnac_host }}" + dnac_port: "{{ dnac_port }}" + dnac_username: "{{ dnac_username }}" + dnac_password: "{{ dnac_password }}" + dnac_verify: "{{ dnac_verify }}" + dnac_debug: "{{ dnac_debug }}" + dnac_log: True + state: deleted + config_verify: True + config: + - global_credential_details: + cli_credential: + - description: CLI1 + username: cli1 + snmp_v2c_read: + - description: SNMPv2c Read1 # use this for deletion + snmp_v2c_write: + - description: SNMPv2c Write1 # use this for deletion + snmp_v3: + - description: snmpV31 + https_read: + - description: HTTP Read1 + username: HTTP_Read1 + https_write: + - description: HTTP Write1 + username: HTTP_Write1 + """ RETURN = r""" diff --git a/plugins/modules/lan_automation_workflow_manager.py b/plugins/modules/lan_automation_workflow_manager.py index 05d6df3e41..7eed1972be 100644 --- a/plugins/modules/lan_automation_workflow_manager.py +++ b/plugins/modules/lan_automation_workflow_manager.py @@ -1234,6 +1234,12 @@ def extract_lan_automation(self, config): msg="IP address: {} does not exist in Catalyst Center. 
Please provide a valid IP address for " "'lan_automation -> peer_device_management_ip_address'!".format(peer_device_ip), response=[] ) + self.log("Validate peer device management IP address is not the same as primary device IP", "INFO") + if primary_device_ip == peer_device_ip: + self.module.fail_json( + msg="The primary device management IP address '{}' cannot be the same as the peer device IP " + "address '{}'.".format(primary_device_ip, peer_device_ip), response=[] + ) self.log("Peer device management IP address '{}' is valid.".format(peer_device_ip), "DEBUG") else: self.log("Peer device IP not provided. Skipping peer device checks.", "INFO") diff --git a/plugins/modules/network_settings_workflow_manager.py b/plugins/modules/network_settings_workflow_manager.py index dcc2d5ab06..db6be10030 100644 --- a/plugins/modules/network_settings_workflow_manager.py +++ b/plugins/modules/network_settings_workflow_manager.py @@ -100,6 +100,13 @@ The former identifier for the global pool. It should be used exclusively when you need to update the global pool's name. type: str + force_delete: + description: > + Forcefully delete all IP pools from the global level of the global pool. + The default value is false. + type: bool + required: false + default: false reserve_pool_details: description: Reserved IP subpool details from the global pool. @@ -227,6 +234,14 @@ Allows devices on IPv6 networks to self-configure their IP addresses autonomously, eliminating the need for manual setup. type: bool + force_delete: + description: > + Forcefully delete all IP pools from the reserve level of the IP sub-pool. + The default value is false. 
+ type: bool + required: false + default: false + network_management_details: description: Set default network settings for the site type: list @@ -445,10 +460,10 @@ dnac_port: "{{dnac_port}}" dnac_version: "{{dnac_version}}" dnac_debug: "{{dnac_debug}}" - dnac_log: True + dnac_log: true dnac_log_level: "{{ dnac_log_level }}" state: merged - config_verify: True + config_verify: true config: - global_pool_details: settings: @@ -470,25 +485,25 @@ dnac_port: "{{dnac_port}}" dnac_version: "{{dnac_version}}" dnac_debug: "{{dnac_debug}}" - dnac_log: True + dnac_log: true dnac_log_level: "{{ dnac_log_level }}" state: merged - config_verify: True + config_verify: true config: - reserve_pool_details: - site_name: string name: string pool_type: LAN - ipv6_address_space: True + ipv6_address_space: true ipv4_global_pool: string - ipv4_prefix: True + ipv4_prefix: true ipv4_prefix_length: 9 ipv4_subnet: string - ipv6_prefix: True + ipv6_prefix: true ipv6_prefix_length: 64 ipv6_global_pool: string ipv6_subnet: string - slaac_support: True + slaac_support: true - name: Create reserve an ip pool using global pool name cisco.dnac.network_settings_workflow_manager: @@ -499,25 +514,25 @@ dnac_port: "{{dnac_port}}" dnac_version: "{{dnac_version}}" dnac_debug: "{{dnac_debug}}" - dnac_log: True + dnac_log: true dnac_log_level: "{{ dnac_log_level }}" state: merged - config_verify: True + config_verify: true config: - reserve_pool_details: - name: string site_name: string pool_type: LAN - ipv6_address_space: True + ipv6_address_space: true ipv4_global_pool_name: string - ipv4_prefix: True + ipv4_prefix: true ipv4_prefix_length: 9 ipv4_subnet: string - ipv6_prefix: True + ipv6_prefix: true ipv6_prefix_length: 64 ipv6_global_pool_name: string ipv6_subnet: string - slaac_support: True + slaac_support: true - name: Delete reserved pool cisco.dnac.network_settings_workflow_manager: @@ -528,10 +543,10 @@ dnac_port: "{{dnac_port}}" dnac_version: "{{dnac_version}}" dnac_debug: "{{dnac_debug}}" - 
dnac_log: True + dnac_log: true dnac_log_level: "{{ dnac_log_level }}" state: deleted - config_verify: True + config_verify: true config: - reserve_pool_details: - site_name: string @@ -565,10 +580,10 @@ dnac_port: "{{dnac_port}}" dnac_version: "{{dnac_version}}" dnac_debug: "{{dnac_debug}}" - dnac_log: True + dnac_log: true dnac_log_level: "{{ dnac_log_level }}" state: merged - config_verify: True + config_verify: true config: - network_management_details: - site_name: string @@ -587,10 +602,10 @@ ip_address: string port: 443 snmp_server: - configure_dnac_ip: True + configure_dnac_ip: true ip_addresses: list syslog_server: - configure_dnac_ip: True + configure_dnac_ip: true ip_addresses: list - name: Adding the network_aaa and client_and_endpoint_aaa AAA server @@ -602,10 +617,10 @@ dnac_port: "{{dnac_port}}" dnac_version: "{{dnac_version}}" dnac_debug: "{{dnac_debug}}" - dnac_log: True + dnac_log: true dnac_log_level: "{{ dnac_log_level }}" state: merged - config_verify: True + config_verify: true config: - network_management_details: - site_name: string @@ -630,10 +645,10 @@ dnac_port: "{{dnac_port}}" dnac_version: "{{dnac_version}}" dnac_debug: "{{dnac_debug}}" - dnac_log: True + dnac_log: true dnac_log_level: "{{ dnac_log_level }}" state: merged - config_verify: True + config_verify: true config: - network_management_details: - site_name: string @@ -751,6 +766,7 @@ def validate_input(self): "name": {"type": 'string'}, "prev_name": {"type": 'string'}, "pool_type": {"type": 'string', "choices": ["Generic", "Tunnel"]}, + 'force_delete': {'type': 'bool', 'required': False, 'default': False}, } } }, @@ -782,6 +798,7 @@ def validate_input(self): "type": 'string', "choices": ["Generic", "LAN", "Management", "Service", "WAN"] }, + 'force_delete': {'type': 'bool', 'required': False, 'default': False}, }, "network_management_details": { "type": 'list', @@ -1745,7 +1762,7 @@ def get_network_params_v2(self, site_name, site_id): return network_details - def
get_reserved_ip_subpool(self, site_id): + def get_reserved_ip_subpool(self, site_name, site_id): """ Retrieve all the reserved IP subpool details from the Cisco Catalyst Center. @@ -1757,7 +1774,7 @@ def get_reserved_ip_subpool(self, site_id): self (object) - The current object with updated desired reserved subpool information. """ - value = 1 + offset = 1 self.all_reserved_pool_details.update({site_id: []}) start_time = time.time() while True: @@ -1768,14 +1785,12 @@ def get_reserved_ip_subpool(self, site_id): op_modifies=True, params={ "site_id": site_id, - "offset": value + "offset": offset } ) except Exception as msg: - self.msg = ( - "Exception occurred while getting the reserved pool details " - "from Cisco Catalyst Center: {msg}".format(msg=msg) - ) + self.msg = "Exception occurred while fetching reserved pool details for site '{0}': {1}".format( + site_name, site_id) self.log(str(msg), "ERROR") self.status = "failed" return self @@ -1787,12 +1802,19 @@ def get_reserved_ip_subpool(self, site_id): reserve_pool_details = response.get("response") if not reserve_pool_details: - self.log("No subpools are reserved in the site with ID - '{0}'." - .format(site_id), "DEBUG") + self.log("No subpools are reserved in the site with ID - '{0}': '{1}'." 
+ .format(site_id, site_name), "DEBUG") return self self.all_reserved_pool_details.get(site_id).extend(reserve_pool_details) - value += 25 + + if len(reserve_pool_details) < 25: + self.log("Found {0} record(s), No more record available for the next offset" + .format(str(len(reserve_pool_details))), "INFO") + break + + offset += 25 + end_time = time.time() if (end_time - start_time) >= self.max_timeout: self.msg = ( @@ -1823,13 +1845,14 @@ def global_pool_exists(self, name): "details": None, "id": None } - value = 1 + all_global_pool = [] + offset = 1 while True: try: response = self.dnac._exec( family="network_settings", function="get_global_pool", - params={"offset": value} + params={"offset": offset} ) except Exception as msg: self.msg = ( @@ -1848,18 +1871,43 @@ def global_pool_exists(self, name): all_global_pool_details = response.get("response") if not all_global_pool_details: + if name == "": + self.log("Global pool '{0}' does not exist".format(all_global_pool), "INFO") + return all_global_pool self.log("Global pool '{0}' does not exist".format(name), "INFO") return global_pool - global_pool_details = get_dict_result(all_global_pool_details, "ipPoolName", name) - if global_pool_details: + global_pool_details = None + if name == "": + global_pool_details = all_global_pool_details + else: + global_pool_details = get_dict_result(all_global_pool_details, "ipPoolName", name) + + if global_pool_details and isinstance(global_pool_details, dict): self.log("Global pool found with name '{0}': {1}".format(name, global_pool_details), "INFO") global_pool.update({"exists": True}) global_pool.update({"id": global_pool_details.get("id")}) global_pool["details"] = self.get_global_pool_params(global_pool_details) break - value += 25 + if global_pool_details and isinstance(global_pool_details, list) and name == "": + self.log("Global pool found {0}".format( + self.pprint(global_pool_details)), "INFO") + + for each_pool in global_pool_details: + global_del_pool = { + "exists": 
True, + "id": each_pool.get("id"), + "details": self.get_global_pool_params(each_pool) + } + all_global_pool.append(global_del_pool) + + if len(all_global_pool_details) < 25: + self.log("Found {0} record(s), No more record available for the next offset" + .format(str(len(all_global_pool_details))), "INFO") + break + + offset += 25 self.log("Formatted global pool details: {0}".format(global_pool), "DEBUG") return global_pool @@ -1895,24 +1943,48 @@ def reserve_pool_exists(self, name, site_name): return reserve_pool if not self.all_reserved_pool_details.get(site_id): - self.get_reserved_ip_subpool(site_id) + self.get_reserved_ip_subpool(site_name, site_id) if not self.all_reserved_pool_details.get(site_id): self.log("Reserved pool {0} does not exist in the site {1}" .format(name, site_name), "DEBUG") return reserve_pool - reserve_pool_details = get_dict_result(self.all_reserved_pool_details.get(site_id), "groupName", name) - if reserve_pool_details: - self.log("Reserve pool found with name '{0}' in the site '{1}': {2}" + reserve_pool_details = None + if name == "": + reserve_pool_details = self.all_reserved_pool_details.get(site_id) + else: + reserve_pool_details = get_dict_result( + self.all_reserved_pool_details.get(site_id), "groupName", name) + + if reserve_pool_details and isinstance(reserve_pool_details, dict): + self.log("Reserve pool found with name {0} in the site '{1}': {2}" .format(name, site_name, reserve_pool_details), "INFO") reserve_pool.update({"exists": True}) reserve_pool.update({"id": reserve_pool_details.get("id")}) reserve_pool.update({"details": self.get_reserve_pool_params(reserve_pool_details)}) + self.log("Reserved pool details: {0}".format(reserve_pool.get("details")), "DEBUG") + self.log("Reserved pool id: {0}".format(reserve_pool.get("id")), "DEBUG") + return reserve_pool - self.log("Reserved pool details: {0}".format(reserve_pool.get("details")), "DEBUG") - self.log("Reserved pool id: {0}".format(reserve_pool.get("id")), "DEBUG") - return 
reserve_pool + if reserve_pool_details and isinstance(reserve_pool_details, list): + self.log("Found reserve pools for site '{0}': {1}" + .format(site_name, self.pprint(reserve_pool_details)), "INFO") + all_reserve_pool = [] + for each_pool in reserve_pool_details: + reserve_del_pool = { + "exists": True, + "id": each_pool.get("id"), + "details": self.get_reserve_pool_params(each_pool), + "success": True + } + all_reserve_pool.append(reserve_del_pool) + + self.log("Reserved pool list details: {0}".format( + self.pprint(all_reserve_pool)), "DEBUG") + return all_reserve_pool + + return None def get_have_global_pool(self, global_pool_details): """ @@ -1944,22 +2016,26 @@ def get_have_global_pool(self, global_pool_details): errors = [] # To collect all error messages for pool_details in global_pool_ippool: + delete_all = pool_details.get("force_delete") name = pool_details.get("name") - if name is None: - errors.append("Missing required parameter 'name' in global_pool_details: {}".format(pool_details)) - continue + if delete_all: + name = "" + else: + if name is None: + errors.append("Missing required parameter 'name' in global_pool_details: {}".format(pool_details)) + continue - name_length = len(name) - if name_length > 100: - errors.append("The length of the 'name' in global_pool_details should be less or equal to 100. Invalid_config: {}".format(pool_details)) + name_length = len(name) + if name_length > 100: + errors.append("The length of the 'name' in global_pool_details should be less or equal to 100. Invalid_config: {}".format(pool_details)) - if " " in name: - errors.append("The 'name' in global_pool_details should not contain any spaces. Invalid_config: {}".format(pool_details)) + if " " in name: + errors.append("The 'name' in global_pool_details should not contain any spaces. 
Invalid_config: {}".format(pool_details)) - pattern = r'^[\w\-./]+$' - if not re.match(pattern, name): - errors.append("The 'name' in global_pool_details should contain only letters, numbers, and -_./ characters. Invalid_config: {}" - .format(pool_details)) + pattern = r'^[\w\-./]+$' + if not re.match(pattern, name): + errors.append("The 'name' in global_pool_details should contain only letters, numbers, and -_./ characters. Invalid_config: {}" + .format(pool_details)) if errors: # If there are errors, return a failure status with all messages @@ -1967,19 +2043,32 @@ def get_have_global_pool(self, global_pool_details): self.status = "failed" return self + if name == "": + global_pool.append(self.global_pool_exists(name)) + self.log("Global pool details: {0}".format(global_pool), "DEBUG") + self.have.update({"globalPool": global_pool}) + self.msg = "Collected all global pool details from the Cisco Catalyst Center." + self.status = "success" + return self + for pool_details in global_pool_ippool: - name = pool_details.get("name") # If the Global Pool doesn't exist and a previous name is provided # Else try using the previous name + name = pool_details.get("name") + if pool_details.get("force_delete"): + name = "" + global_pool.append(self.global_pool_exists(name)) - self.log("Global pool details of '{0}': {1}".format(name, global_pool[global_pool_index]), "DEBUG") + self.log("Global pool details of '{0}': {1}".format( + name, global_pool[global_pool_index]), "DEBUG") prev_name = pool_details.get("prev_name") if global_pool[global_pool_index].get("exists") is False and \ prev_name is not None: global_pool.pop() global_pool.append(self.global_pool_exists(prev_name)) if global_pool[global_pool_index].get("exists") is False: - self.msg = "Prev name {0} doesn't exist in global_pool_details".format(prev_name) + self.msg = "Prev name {0} doesn't exist in global_pool_details".format( + prev_name) self.status = "failed" return self @@ -2008,28 +2097,33 @@ def 
get_have_reserve_pool(self, reserve_pool_details): reserve_pool = [] reserve_pool_index = 0 for item in reserve_pool_details: + delete_all = item.get("force_delete") name = item.get("name") - if name is None: - self.msg = "Missing required parameter 'name' in reserve_pool_details." - self.status = "failed" - return self + site_name = item.get("site_name") + if delete_all: + name = "" + else: + if name is None: + self.msg = "Missing required parameter 'name' in reserve_pool_details." + self.status = "failed" + return self - name_length = len(name) - if name_length > 100: - self.msg = "The length of the 'name' in reserve_pool_details should be less or equal to 100." - self.status = "failed" - return self + name_length = len(name) + if name_length > 100: + self.msg = "The length of the 'name' in reserve_pool_details should be less or equal to 100." + self.status = "failed" + return self - if " " in name: - self.msg = "The 'name' in reserve_pool_details should not contain any spaces." - self.status = "failed" - return self + if " " in name: + self.msg = "The 'name' in reserve_pool_details should not contain any spaces." + self.status = "failed" + return self - pattern = r'^[\w\-./]+$' - if not re.match(pattern, name): - self.msg = "The 'name' in reserve_pool_details should contain only letters, numbers and -_./ characters." - self.status = "failed" - return self + pattern = r'^[\w\-./]+$' + if not re.match(pattern, name): + self.msg = "The 'name' in reserve_pool_details should contain only letters, numbers and -_./ characters." 
+ self.status = "failed" + return self site_name = item.get("site_name") self.log("Site Name: {0}".format(site_name), "DEBUG") @@ -2040,37 +2134,48 @@ def get_have_reserve_pool(self, reserve_pool_details): # Check if the Reserved Pool exists in Cisco Catalyst Center # based on the provided name and site name - reserve_pool.append(self.reserve_pool_exists(name, site_name)) - if not reserve_pool[reserve_pool_index].get("success"): + if self.reserve_pool_exists(name, site_name): + reserve_pool.append(self.reserve_pool_exists(name, site_name)) + else: + self.have.update({"reservePool": reserve_pool}) + return self + + if name != "" and not reserve_pool[reserve_pool_index].get("success"): + return self.check_return_status() + elif name == "" and len(reserve_pool[reserve_pool_index]) < 1: return self.check_return_status() - self.log("Reserved pool details for '{0}': {1}".format(name, reserve_pool[reserve_pool_index]), "DEBUG") - # If the Reserved Pool doesn't exist and a previous name is provided - # Else try using the previous name - prev_name = item.get("prev_name") - if reserve_pool[reserve_pool_index].get("exists") is False and \ - prev_name is not None: - reserve_pool.pop() - reserve_pool.append(self.reserve_pool_exists(prev_name, site_name)) - if not reserve_pool[reserve_pool_index].get("success"): - return self.check_return_status() - - # If the previous name doesn't exist in Cisco Catalyst Center, return with error - if reserve_pool[reserve_pool_index].get("exists") is False: - self.msg = "Prev name {0} doesn't exist in reserve_pool_details".format(prev_name) - self.status = "failed" - return self + self.log("Reserved pool details for {0}: {1}" + .format(name, reserve_pool[reserve_pool_index]), "DEBUG") + + if name != "": + # If the Reserved Pool doesn't exist and a previous name is provided + # Else try using the previous name + prev_name = item.get("prev_name") + if reserve_pool[reserve_pool_index].get("exists") is False and prev_name is not None: + 
self.log("Current pool does not exist. Checking for previous name '{0}'." + .format(prev_name), "DEBUG") + reserve_pool.pop() + reserve_pool.append(self.reserve_pool_exists(prev_name, site_name)) + if not reserve_pool[reserve_pool_index].get("success"): + return self.check_return_status() + + # If the previous name doesn't exist in Cisco Catalyst Center, return with error + if reserve_pool[reserve_pool_index].get("exists") is False: + self.msg = "Prev name {0} doesn't exist in reserve_pool_details".format(prev_name) + self.status = "failed" + return self - self.log("Reserved pool exists: {0}".format(reserve_pool[reserve_pool_index].get("exists")), "DEBUG") - self.log("Reserved pool: {0}".format(reserve_pool[reserve_pool_index].get("details")), "DEBUG") + self.log("Reserved pool exists: {0}".format(reserve_pool[reserve_pool_index].get("exists")), "DEBUG") + self.log("Reserved pool: {0}".format(reserve_pool[reserve_pool_index].get("details")), "DEBUG") - # If reserve pool exist, convert ipv6AddressSpace to the required format (boolean) - if reserve_pool[reserve_pool_index].get("exists"): - reserve_pool_info = reserve_pool[reserve_pool_index].get("details") - if reserve_pool_info.get("ipv6AddressSpace") == "False": - reserve_pool_info.update({"ipv6AddressSpace": False}) - else: - reserve_pool_info.update({"ipv6AddressSpace": True}) + # If reserve pool exist, convert ipv6AddressSpace to the required format (boolean) + if reserve_pool[reserve_pool_index].get("exists"): + reserve_pool_info = reserve_pool[reserve_pool_index].get("details") + if reserve_pool_info.get("ipv6AddressSpace") == "False": + reserve_pool_info.update({"ipv6AddressSpace": False}) + else: + reserve_pool_info.update({"ipv6AddressSpace": True}) reserve_pool_index += 1 @@ -3719,6 +3824,73 @@ def get_diff_merged(self, config): self.update_network(network_management).check_return_status() return self + def delete_ip_pool(self, name, pool_id, function_name, pool_type): + """ + Delete single reserve/global 
pool from based on the pool ID and return + execution id and status message. + + Parameters: + name (str) - name contains ip pool name for release ip pool + pool_id (str) - ID contails IP pool id from get ip pool + function_name (str) - contains execution of sdk function name either + release_reserve_ip_subpool or delete_global_ip_pool + pool_type (str) - contains string message for log either Reserve or Global + + Returns: + execution_details (dict) - contains response for the delete execution + contains name, execution id and status message. + """ + self.log("{0} IP pool scheduled for deletion: {1}".format(pool_type, name), "INFO") + self.log("{0} pool '{1}' id: {2}".format(pool_type, name, pool_id), "DEBUG") + try: + response = self.dnac._exec( + family="network_settings", + function=function_name, + op_modifies=True, + params={"id": pool_id}, + ) + self.check_execution_response_status(response, + function_name).check_return_status() + self.log("Response received from delete {0} pool API: {1}". 
+ format(pool_type, self.pprint(response)), "DEBUG") + execution_id = response.get("executionId") + success_msg, failed_msg = None, None + + if pool_type == "Global": + success_msg = "Global pool deleted successfully" + failed_msg = "Unable to delete global pool reservation" + else: + success_msg = "Ip subpool reservation released successfully" + failed_msg = "Unable to release subpool reservation" + + if execution_id: + return { + "name": name, + "execution_id": execution_id, + "msg": success_msg, + "status": "success" + } + self.log("No execution ID received for '{name}'".format(name=name), "ERROR") + return { + "name": name, + "execution_id": None, + "msg": failed_msg, + "status": "failed" + } + + except Exception as e: + error_msg = ( + "Exception occurred while deleting the {type} pool with the name '{name}': {error}" + .format(name=name, error=str(e), type=pool_type) + ) + self.log(error_msg, "ERROR") + return { + "name": name, + "execution_id": None, + "msg": error_msg, + "status": "failed" + } + def delete_reserve_pool(self, reserve_pool_details): """ Delete a Reserve Pool by name in Cisco Catalyst Center @@ -3729,47 +3901,68 @@ def delete_reserve_pool(self, reserve_pool_details): Returns: self - The current object with Global Pool, Reserved Pool, Network Servers information. 
""" - reserve_pool_index = -1 for item in reserve_pool_details: reserve_pool_index += 1 - name = item.get("name") - reserve_pool_exists = self.have.get("reservePool")[reserve_pool_index].get("exists") + delete_all = item.get("force_delete") + site_name = item.get("site_name") result_reserve_pool = self.result.get("response")[1].get("reservePool") - - if not reserve_pool_exists: - result_reserve_pool.get("msg").update({name: "Reserve Pool not found"}) - self.log("Reserved Ip Subpool '{0}' not found".format(name), "INFO") - continue - - self.log("Reserved IP pool scheduled for deletion: {0}" - .format(self.have.get("reservePool")[reserve_pool_index].get("name")), "INFO") - _id = self.have.get("reservePool")[reserve_pool_index].get("id") - self.log("Reserved pool '{0}' id: {1}".format(name, _id), "DEBUG") - try: - response = self.dnac._exec( - family="network_settings", - function="release_reserve_ip_subpool", - op_modifies=True, - params={"id": _id}, - ) - except Exception as msg: - self.msg = ( - "Exception occurred while updating the reserved pool with the name '{name}': {msg}" - .format(name=name, msg=msg) + if delete_all: + self.log("Delete all reserved pools operation initiated for site '{0}'" + .format(site_name), "INFO") + have_reserve_pool = reserve_pool = self.have.get("reservePool")[reserve_pool_index] + result_reserve_pool.get("response").update({site_name: []}) + if have_reserve_pool and len(have_reserve_pool) > 0: + if isinstance(have_reserve_pool, dict): + self.log("Found reserved pools for site '{0}': {1}" + .format(site_name, self.pprint(have_reserve_pool)), "DEBUG") + reserve_pool = have_reserve_pool.get("exists") + if not reserve_pool: + result_reserve_pool.get("msg").update({site_name: "Reserve Pool not found"}) + self.log("Reserved IP Subpool '{0}' not found".format(site_name), "INFO") + continue + + for each_pool in have_reserve_pool: + if not each_pool.get("exists"): + result_reserve_pool["msg"].update({site_name: "Reserve Pool not found"}) + 
self.log("Reserved IP Subpool '{0}' not found for deletion".format(site_name), "INFO") + continue + + pool_name = each_pool.get("details", {}).get("name") + pool_id = each_pool.get("id") + self.log("Processing deletion for reserved pool '{0}' with ID '{1}'" + .format(pool_name, pool_id), "INFO") + execution_details = self.delete_ip_pool(pool_name, pool_id, + "release_reserve_ip_subpool", + "Reserve") + result_reserve_pool["response"][site_name].append(execution_details) + self.log("Deletion completed for reserved pool '{0}' with ID '{1}'" + .format(pool_name, pool_id), "DEBUG") + else: + result_reserve_pool["msg"].update({site_name: "No Reserve Pools available"}) + self.log("No Reserved IP Subpools found for site '{0}'. Skipping deletion." + .format(site_name), "INFO") + else: + pool_name = item.get("name") + pool_id = self.have.get("reservePool")[reserve_pool_index].get("id") + self.log("Delete operation initiated for specific reserved pool '{0}'".format(pool_name), "INFO") + + reserve_pool = None + if self.have.get("reservePool"): + reserve_pool = self.have.get("reservePool")[reserve_pool_index].get("exists") + + if not reserve_pool: + result_reserve_pool.get("msg").update({pool_name: "Reserve Pool not found"}) + self.log("Reserved IP Subpool '{0}' not found. 
Skipping deletion.".format(pool_name), "INFO") + continue + + self.log("Reserved IP pool '{0}' scheduled for deletion".format(pool_name), "INFO") + self.log("Reserved pool '{0}' ID: {1}".format(pool_name, pool_id), "DEBUG") + execution_details = self.delete_ip_pool( + pool_name, pool_id, "release_reserve_ip_subpool", "Reserve" ) - self.log(str(msg), "ERROR") - self.status = "failed" - return self - - self.check_execution_response_status(response, "release_reserve_ip_subpool").check_return_status() - executionid = response.get("executionId") - result_reserve_pool = self.result.get("response")[1].get("reservePool") - result_reserve_pool.get("response").update({name: {}}) - result_reserve_pool.get("response").get(name) \ - .update({"Execution Id": executionid}) - result_reserve_pool.get("msg") \ - .update({name: "Ip subpool reservation released successfully"}) + self.log("Deletion completed for reserved pool '{0}' with ID '{1}'".format(pool_name, pool_id), "DEBUG") + result_reserve_pool["response"].update({pool_name: execution_details}) self.msg = "Reserved pool(s) released successfully" self.status = "success" @@ -3789,40 +3982,41 @@ def delete_global_pool(self, global_pool_details): result_global_pool = self.result.get("response")[0].get("globalPool") global_pool_index = 0 for item in self.have.get("globalPool"): - global_pool_exists = item.get("exists") - name = global_pool_details.get("settings").get("ip_pool")[global_pool_index].get("name") - global_pool_index += 1 - if not global_pool_exists: - result_global_pool.get("msg").update({name: "Global Pool not found"}) - self.log("Global pool '{0}' not found".format(name), "INFO") - continue - - id = item.get("id") - try: - response = self.dnac._exec( - family="network_settings", - function="delete_global_ip_pool", - op_modifies=True, - params={"id": id}, - ) - except Exception as msg: - self.msg = ( - "Exception occurred while deleting the global pool with '{name}': {msg}" - .format(name=name, msg=msg) - ) - 
self.log(str(msg), "ERROR") - self.status = "failed" - return self - - # Check the execution status - self.check_execution_response_status(response, "delete_global_ip_pool").check_return_status() - executionid = response.get("executionId") - - # Update result information - result_global_pool = self.result.get("response")[0].get("globalPool") - result_global_pool.get("response").update({name: {}}) - result_global_pool.get("response").get(name).update({"Execution Id": executionid}) - result_global_pool.get("msg").update({name: "Global pool deleted successfully"}) + if isinstance(item, list): + self.log("Processing global pool deletion for a list of items", "INFO") + for each_item in item: + global_pool_exists = each_item.get("exists") + pool_name = each_item.get("details").get("ipPoolName") + global_pool_index += 1 + + if not global_pool_exists: + result_global_pool.get("msg").update({pool_name: "Global Pool not found"}) + self.log("Global pool '{0}' not found".format(pool_name), "INFO") + continue + + execution_details = {} + pool_id = each_item.get("id") + execution_details = self.delete_ip_pool(pool_name, pool_id, + "delete_global_ip_pool", + "Global") + self.log("Deletion completed for global pool '{0}'".format(pool_name), "DEBUG") + result_global_pool["response"][pool_name].append(execution_details) + else: + self.log("Processing global pool deletion for a single item", "INFO") + global_pool_exists = item.get("exists") + pool_name = global_pool_details.get("settings").get("ip_pool")[global_pool_index].get("name") + global_pool_index += 1 + if not global_pool_exists: + result_global_pool.get("msg").update({pool_name: "Global Pool not found"}) + self.log("Global pool '{0}' not found. Skipping deletion.".format(pool_name), "INFO") + continue + + self.log("Global pool '{0}' exists. 
Proceeding with deletion.".format(pool_name), "INFO") + id = item.get("id") + execution_details = self.delete_ip_pool(pool_name, id, + "delete_global_ip_pool", + "Global") + result_global_pool.get("response").update({pool_name: execution_details}) self.msg = "Global pools deleted successfully" self.status = "success" @@ -3949,37 +4143,105 @@ def verify_diff_deleted(self, config): self.get_have(config) self.log("Current State (have): {0}".format(self.have), "INFO") self.log("Desired State (want): {0}".format(self.want), "INFO") + delete_all = [] if config.get("global_pool_details") is not None: + self.log("Starting validation for Global Pool absence.", "INFO") global_pool_index = 0 global_pool_details = self.have.get("globalPool") for item in global_pool_details: - global_pool_exists = item.get("exists") - name = config.get("global_pool_details").get("settings") \ - .get("ip_pool")[global_pool_index].get("name") - if global_pool_exists: - self.msg = "Global Pool Config '{0}' is not applied to the Cisco Catalyst Center" \ - .format(name) - self.status = "failed" - return self + if isinstance(item, dict): + global_pool_exists = item.get("exists") + name = config.get("global_pool_details").get("settings")\ + .get("ip_pool")[global_pool_index].get("name") + if global_pool_exists: + self.msg = "Global Pool Config '{0}' is not applied to the Cisco Catalyst Center".format(name) + self.status = "failed" + return self + + self.log("Successfully validated absence of Global Pool '{0}'.". 
+ format(name), "INFO") + else: + if len(item) > 0: + for each_ip_pool in item: + each_pool_validation = {} + global_pool_exists = each_ip_pool.get("exists") + name = each_ip_pool.get("details", {}).get("ipPoolName") + if global_pool_exists: + each_pool_validation = { + "name": name, + "msg": "Global Pool Config is not applied to the Catalyst Center", + "validation": "failed", + } + delete_all.append(each_pool_validation) - self.log("Successfully validated absence of Global Pool '{0}'.".format(name), "INFO") global_pool_index += 1 + + if len(delete_all) > 0: + self.msg = "Global Pool Config is not applied to the Catalyst Center" + self.set_operation_result("failed", False, self.msg, + "ERROR", delete_all).check_return_status() + self.result.get("response")[0].get("globalPool").update({"Validation": "Success"}) + self.msg = "Successfully validated the absence of Global Pool." + self.log(self.msg, "INFO") + self.log("Last Check {0}".format(self.result.get("response")[0].get("globalPool")), "INFO") + del_response = self.result.get("response")[0].get("globalPool").get("response") + delete_all.append(del_response) + self.set_operation_result("success", True, self.msg, + "INFO", del_response).check_return_status() + if config.get("reserve_pool_details") is not None: + self.log("Starting validation for Reserve Pool absence.", "INFO") reserve_pool_index = 0 reserve_pool_details = self.have.get("reservePool") + for item in reserve_pool_details: - reserve_pool_exists = item.get("exists") - name = config.get("reserve_pool_details")[reserve_pool_index].get("name") - if reserve_pool_exists: - self.msg = "Reserved Pool Config '{0}' is not applied to the Catalyst Center" \ - .format(name) - self.status = "failed" - return self + site_name = config.get("reserve_pool_details")[reserve_pool_index].get("site_name") + if isinstance(item, dict): + reserve_pool_exists = item.get("exists") + name = config.get("reserve_pool_details")[reserve_pool_index].get("name") + + if 
reserve_pool_exists: + self.msg = "Reserved Pool Config '{0}' is not applied to the Catalyst Center"\ + .format(name) + self.set_operation_result("failed", False, self.msg, "ERROR").check_return_status() + + self.log("Successfully validated the absence of Reserve Pool '{0}'.".format(name), "INFO") + reserve_pool_index += 1 - self.log("Successfully validated the absence of Reserve Pool '{0}'.".format(name), "INFO") - self.result.get("response")[1].get("reservePool").update({"Validation": "Success"}) + if self.result.get("response"): + self.result.get("response")[1].get("reservePool").update({"Validation": "Success"}) + + else: + if len(item) > 0: + for each_ip_pool in item: + each_pool_validation = {} + reserve_pool_exists = each_ip_pool.get("exists") + if reserve_pool_exists: + self.log("Reserve Pool '{0}' found. Marking as failed validation." + .format(name), "ERROR") + each_pool_validation = { + "site_name": site_name, + "name": each_ip_pool.get("details", {}).get("name"), + "msg": "Reserved Pool Config is not applied to the Catalyst Center", + "validation": "failed", + } + delete_all.append(each_pool_validation) + + reserve_pool_index += 1 + + if len(delete_all) > 0: + self.msg = "Reserved Pool Config is not applied to the Catalyst Center" + self.set_operation_result("failed", False, self.msg, + "ERROR", delete_all).check_return_status() + + self.msg = "Successfully validated the absence of Reserve Pool." 
+ self.log(self.msg, "INFO") + del_response = self.result.get("response")[1].get("reservePool").get("response") + delete_all.append(del_response) + self.set_operation_result("success", True, self.msg, + "INFO", del_response).check_return_status() self.msg = "Successfully validated the absence of Global Pool/Reserve Pool" self.status = "success" diff --git a/plugins/modules/pnp_workflow_manager.py b/plugins/modules/pnp_workflow_manager.py index 4360d26dcb..36f9733e61 100644 --- a/plugins/modules/pnp_workflow_manager.py +++ b/plugins/modules/pnp_workflow_manager.py @@ -916,6 +916,11 @@ class instance for further use. self.result['diff'] = self.validated_config self.result['changed'] = True return self + elif len(bulk_params.get("failureList")) > 0: + self.msg = "Unable to import below {0} device(s). ".format( + len(bulk_params.get("failureList"))) + self.set_operation_result("failed", False, self.msg, "ERROR", + bulk_params).check_return_status() self.msg = "Bulk import failed" self.log(self.msg, "CRITICAL") diff --git a/plugins/modules/provision_workflow_manager.py b/plugins/modules/provision_workflow_manager.py index 5e68d55c8b..632306530b 100644 --- a/plugins/modules/provision_workflow_manager.py +++ b/plugins/modules/provision_workflow_manager.py @@ -392,7 +392,7 @@ def validate_input(self, state=None): 'elements': 'str'}, "dynamic_interfaces": {'type': 'list', 'required': False, 'elements': 'dict'}, - "skip_ap_provision": {'type': 'bool', 'required': False, "default": True}, + "skip_ap_provision": {'type': 'bool', 'required': False}, "rolling_ap_upgrade": {'type': 'dict', 'required': False}, "provisioning": {'type': 'bool', 'required': False, "default": True}, "force_provisioning": {'type': 'bool', 'required': False, "default": False} @@ -695,6 +695,46 @@ def is_device_assigned_to_site(self, uuid): self.log(msg, "CRITICAL") self.module.fail_json(msg=msg) + def is_device_assigned_to_site_v1(self, uuid): + """ + Checks if a device, specified by its UUID, is 
assigned to any site. + + Parameters: + - self: The instance of the class containing the 'config' attribute + to be validated. + - uuid (str): The UUID of the device to check for site assignment. + Returns: + - tuple: (bool, Optional[str]) + - True and the site name if the device is assigned to a site. + - False and None if not assigned or in case of an error.. + + """ + + self.log("Checking site assignment for device with UUID: {0}".format(uuid), "INFO") + try: + site_api_response = self.dnac_apply['exec']( + family="site_design", + function='get_site_assigned_network_device', + params={"id": uuid} + ) + + self.log("Response collected from the API 'get_site_assigned_network_device' {0}".format(site_api_response)) + site_response = site_api_response.get("response") + + if site_response: + site_name = site_response.get("siteNameHierarchy") + if site_name: + self.log("Device with UUID {0} is assigned to site: {1}".format(uuid, site_name), "INFO") + return True, site_name + + self.log("Device with UUID {0} is not assigned to any site.".format(uuid), "INFO") + return False, None + + except Exception as e: + msg = "Failed to find device with UUID {0} due to: {1}".format(uuid, e) + self.log(msg, "CRITICAL") + self.module.fail_json(msg=msg) + def get_device_site_by_uuid(self, uuid): """ Checks if a device is assigned to any site. @@ -802,23 +842,42 @@ def get_wireless_params(self): } ] - if self.compare_dnac_versions(self.get_ccc_version(), "2.3.5.3") <= 0: - self.log("Checking for managed AP locations in Catalyst Center version <= 2.3.5.3", "DEBUG") - ap_locations = wireless_params[0].get("managedAPLocations") - - if not (ap_locations and isinstance(ap_locations, list)): - self.log("Validating AP locations: {0}".format(ap_locations), "DEBUG") - msg = "Missing Managed AP Locations: Please specify the intended location(s) for the wireless device \ - within the site hierarchy." 
- self.log(msg, "CRITICAL") - self.module.fail_json(msg=msg, response=[]) - - for ap_loc in self.validated_config.get("managed_ap_locations"): - site_type = self.get_site_type(site_name_hierarchy=ap_loc) - self.log("Checking site type for AP location '{0}', resolved type: '{1}'".format(ap_loc, site_type), "DEBUG") - if site_type != "floor": - self.log("Managed AP Location must be a floor", "CRITICAL") - self.module.fail_json(msg="Managed AP Location must be a floor", response=[]) + ap_locations = wireless_params[0].get("managedAPLocations") + if not ap_locations or not isinstance(ap_locations, list): + self.log("Validating AP locations: {0}".format(ap_locations), "DEBUG") + msg = "Missing Managed AP Locations: Please specify the intended location(s) for the wireless device \ + within the site hierarchy." + self.log(msg, "CRITICAL") + self.module.fail_json(msg=msg, response=[]) + + self.floor_names = [] + for ap_loc in self.validated_config.get("managed_ap_locations"): + self.log("Processing AP location: {0}".format(ap_loc), "DEBUG") + site_type = self.get_site_type(site_name_hierarchy=ap_loc) + self.log("Resolved site type for AP location '{0}': '{1}'".format(ap_loc, site_type), "DEBUG") + + if site_type == "floor": + self.log("Adding '{0}' to floor names list".format(ap_loc), "DEBUG") + self.floor_names.append(ap_loc) + elif site_type == "building": + self.log("Building site type detected for '{0}'. 
Retrieving floor details.".format(ap_loc), "DEBUG") + building_name = ap_loc + ".*" + floors = self.get_site(building_name) + + for item in floors['response']: + if item.get('type') == 'floor': + self.log("Floor found: '{0}' for building '{1}'".format(item['nameHierarchy'], ap_loc), "DEBUG") + self.floor_names.append(item['nameHierarchy']) + elif 'additionalInfo' in item: + for additional_info in item['additionalInfo']: + if 'attributes' in additional_info and additional_info['attributes'].get('type') == 'floor': + self.log("Floor found in additionalInfo: '{0}' for building '{1}'".format(item['siteNameHierarchy'], ap_loc), "DEBUG") + self.floor_names.append(item['siteNameHierarchy']) + else: + self.log("Invalid site type detected for '{0}'. Managed AP Location must be building or floor.".format(ap_loc), "CRITICAL") + self.module.fail_json(msg="Managed AP Location must be building or floor", response=[]) + + self.log("Final list of floor names: {0}".format(self.floor_names), "DEBUG") wireless_params[0]["dynamicInterfaces"] = [] if self.validated_config.get("dynamic_interfaces"): @@ -873,11 +932,15 @@ def get_want(self, config): self.validated_config = config self.want = {} + self.device_ip = self.validated_config["management_ip_address"] + state = self.params.get("state") self.want["device_type"] = self.get_dev_type() + if self.want["device_type"] == "wired": self.want["prov_params"] = self.get_wired_params() elif self.want["device_type"] == "wireless": - self.want["prov_params"] = self.get_wireless_params() + if state.lower() == "merged": + self.want["prov_params"] = self.get_wireless_params() else: self.log("Passed devices are neither wired or wireless devices", "WARNING") @@ -904,6 +967,7 @@ def perform_wireless_reprovision(self): """ device_id = self.get_device_id() self.log("Retrieved device ID: {0}".format(device_id), "DEBUG") + prov_params = self.want.get("prov_params")[0] already_provisioned_site = self.get_device_site_by_uuid(device_id) if 
already_provisioned_site != self.site_name: @@ -915,13 +979,22 @@ def perform_wireless_reprovision(self): self.status = "failed" self.check_return_status() + param = [ + { + "deviceName": prov_params.get("deviceName"), + "site": prov_params.get("site"), + "managedAPLocations": self.floor_names, + "dynamicInterfaces": prov_params.get("dynamicInterfaces") + } + ] + try: headers_payload = {"__persistbapioutput": "true"} response = self.dnac_apply['exec']( family="wireless", function="provision_update", op_modifies=True, - params={"payload": self.want.get("prov_params"), + params={"payload": param, "headers": headers_payload} ) self.log("Wireless provisioning response collected from 'provision_update' API is: {0}".format(str(response)), "DEBUG") @@ -1309,14 +1382,13 @@ def provision_wireless_device(self): site_name = self.validated_config.get("site_name_hierarchy") primary_ap_location = prov_params_data.get("primaryManagedAPLocationsSiteIds") secondary_ap_location = prov_params_data.get("secondaryManagedAPLocationsSiteIds") + site_exist, site_id = self.get_site_id(site_name) self.log("Provisioning wireless device with device_id: {0}".format(device_uid), "DEBUG") self.log("Site name: {0}".format(site_name), "DEBUG") self.log("Primary AP location: {0}".format(primary_ap_location), "DEBUG") self.log("Secondary AP location: {0}".format(secondary_ap_location), "DEBUG") - site_exist, site_id = self.get_site_id(site_name) - if self.compare_dnac_versions(self.get_ccc_version(), "2.3.7.6") <= 0: primary_ap_location_site_id_list = [] secondary_ap_location_site_id_list = [] @@ -1336,13 +1408,23 @@ def provision_wireless_device(self): secondary_ap_location_site_id_list.append(secondary_ap_location_site_id) if self.compare_dnac_versions(self.get_ccc_version(), "2.3.5.3") <= 0: + + param = [ + { + "deviceName": prov_params[0].get("deviceName"), + "site": prov_params[0].get("site"), + "managedAPLocations": self.floor_names, + "dynamicInterfaces": 
prov_params[0].get("dynamicInterfaces") + } + ] + self.log("Detected Catalyst Center version <= 2.3.5.3; using old provisioning method", "INFO") try: response = self.dnac_apply['exec']( family="wireless", function="provision", op_modifies=True, - params={"payload": self.want.get("prov_params")} + params={"payload": param} ) execution_id = response.get("executionId") self.log("Received execution ID for provisioning: {0}".format(execution_id), "DEBUG") @@ -1364,11 +1446,27 @@ def provision_wireless_device(self): else: self.log("Detected Catalyst Center version > 2.3.5.3; using new provisioning method", "INFO") self.log("Checking if device is assigned to the site", "INFO") - is_device_assigned = self.is_device_assigned_to_site(device_uid) - if not is_device_assigned: + is_device_assigned_to_a_site, device_site_name = self.is_device_assigned_to_site_v1(device_uid) + + if is_device_assigned_to_a_site is False: self.log("Device {0} is not assigned to site {1}; assigning now.".format(device_uid, site_name), "INFO") self.assign_device_to_site([device_uid], site_name, site_id) + device_id = self.get_device_id() + self.log("Retrieved device ID: {0}".format(device_id), "DEBUG") + + # is_device_assigned_to_given_site, device_site_name = self.is_device_assigned_to_site_v1(device_uid, site_id) + is_device_assigned_to_a_site, device_site_name = self.is_device_assigned_to_site_v1(device_uid) + + if is_device_assigned_to_a_site is True: + if device_site_name != self.site_name: + self.msg = ("Error in re-provisioning a wireless device '{0}' - the device is already associated " + "with a Site {1} and cannot be re-provisioned to Site {2}.".format(self.device_ip, device_site_name, self.site_name)) + self.log(self.msg, "ERROR") + self.result['response'] = self.msg + self.status = "failed" + self.check_return_status() + if primary_ap_location or secondary_ap_location: self.log("Assigning managed AP locations to device ID: {0}".format(device_uid), "INFO") try: @@ -1390,6 +1488,16 @@ def 
provision_wireless_device(self): self.check_tasks_response_status(response, api_name='assign_managed_ap_locations_for_w_l_c') if self.status not in ["failed", "exited"]: self.log("wireless Device '{0}' assign_managed_ap_locations_for_w_l_c completed successfully.".format(self.device_ip), "INFO") + + if self.status == 'failed': + fail_reason = self.msg + self.log("Exception occurred during 'assign_managed_ap_locations_for_w_l_c': {0}".format(str(fail_reason)), "ERROR") + self.msg = "Error in 'assign_managed_ap_locations_for_w_l_c' '{0}' due to {1}".format(self.device_ip, str(fail_reason)) + self.log(self.msg, "ERROR") + self.status = "failed" + self.result['response'] = self.msg + self.check_return_status() + except Exception as e: self.log("Exception occurred during 'assign_managed_ap_locations_for_w_l_c': {0}".format(str(e)), "ERROR") self.msg = "Error in 'assign_managed_ap_locations_for_w_l_c' '{0}' due to {1}".format(self.device_ip, str(e)) @@ -1397,7 +1505,6 @@ def provision_wireless_device(self): self.status = "failed" self.result['response'] = self.msg self.check_return_status() - self.log(self.want.get("prov_params")) self.log("Starting wireless controller provisioning for device ID: {0}".format(device_uid), "INFO") prov_params = self.want.get("prov_params")[0] @@ -1496,42 +1603,69 @@ def get_diff_deleted(self): return self if self.compare_dnac_versions(self.get_ccc_version(), "2.3.5.3") <= 0: - response = self.dnac_apply['exec']( - family="sda", - function="delete_provisioned_wired_device", - op_modifies=True, - params={ - "device_management_ip_address": self.validated_config["management_ip_address"] - }, - ) - self.log("Response collected from the 'delete_provisioned_wired_device' API is : {0}".format(str(response)), "DEBUG") - task_id = response.get("taskId") - deletion_info = self.get_task_status(task_id=task_id) - self.result["changed"] = True - self.result['msg'] = "Deletion done Successfully" - self.result['diff'] = self.validated_config - 
self.result['response'] = task_id - self.log(self.result['msg'], "INFO") - return self + try: + response = self.dnac_apply['exec']( + family="sda", + function="delete_provisioned_wired_device", + op_modifies=True, + params={ + "device_management_ip_address": self.validated_config["management_ip_address"] + }, + ) + self.log("Response collected from the 'delete_provisioned_wired_device' API is : {0}".format(str(response)), "DEBUG") - else: - response = self.dnac._exec( - family="sda", - function='delete_provisioned_devices', - op_modifies=True, - params={'networkDeviceId': device_id}, - ) - self.log("Received API response from 'delete_provisioned_devices': {0}".format(str(response)), "DEBUG") - self.check_tasks_response_status(response, api_name='delete_provisioned_devices') - if self.status not in ["failed", "exited"]: + task_id = response.get("taskId") + deletion_info = self.get_task_status(task_id=task_id) self.result["changed"] = True - self.result['msg'] = "Deletion done Successfully for the device '{0}' ".format(self.validated_config["management_ip_address"]) + self.result['msg'] = "Deletion done Successfully" self.result['diff'] = self.validated_config - self.result['response'] = self.result['msg'] + self.result['response'] = task_id self.log(self.result['msg'], "INFO") return self + except Exception as e: + self.msg = "Error in delete provisioned device '{0}' due to {1}".format(self.device_ip, str(e)) + self.log(self.msg, "ERROR") + self.status = "failed" + self.result['response'] = self.msg + self.check_return_status() + + else: + try: + response = self.dnac._exec( + family="sda", + function='delete_provisioned_devices', + op_modifies=True, + params={'networkDeviceId': device_id}, + ) + self.log("Received API response from 'delete_provisioned_devices': {0}".format(str(response)), "DEBUG") + self.check_tasks_response_status(response, api_name='delete_provisioned_devices') + + if self.status not in ["failed", "exited"]: + self.result["changed"] = True + 
self.result['msg'] = "Deletion done Successfully for the device '{0}' ".format(self.validated_config["management_ip_address"]) + self.result['diff'] = self.validated_config + self.result['response'] = self.result['msg'] + self.log(self.result['msg'], "INFO") + return self + + if self.status in ['failed', 'exited']: + fail_reason = self.msg + self.log("Exception occurred during 'delete_provisioned_devices': {0}".format(str(fail_reason)), "ERROR") + self.msg = "Error in delete provisioned device '{0}' due to {1}".format(self.device_ip, str(fail_reason)) + self.log(self.msg, "ERROR") + self.status = "failed" + self.result['response'] = self.msg + self.check_return_status() + + except Exception as e: + self.msg = "Error in delete provisioned device '{0}' due to {1}".format(self.device_ip, str(e)) + self.log(self.msg, "ERROR") + self.status = "failed" + self.result['response'] = self.msg + self.check_return_status() + def verify_diff_merged(self): """ Verify the merged status(Creation/Updation) of Discovery in Cisco Catalyst Center. 
diff --git a/plugins/modules/sda_host_port_onboarding_workflow_manager.py b/plugins/modules/sda_host_port_onboarding_workflow_manager.py index d811d0f682..36a2aa9fbb 100644 --- a/plugins/modules/sda_host_port_onboarding_workflow_manager.py +++ b/plugins/modules/sda_host_port_onboarding_workflow_manager.py @@ -3661,21 +3661,21 @@ def get_have(self, config, state): port_channel_details = config.get("port_channels") wireless_ssids_details = config.get("wireless_ssids") fabric_site_name_hierarchy = config.get("fabric_site_name_hierarchy") - ip_address = config.get("ip_address") + ip_address = [config.get("ip_address")] hostname = config.get("hostname") fabric_id = self.get_fabric_id(fabric_site_name_hierarchy) have = {"fabric_id": fabric_id, "fabric_site_name_hierarchy": fabric_site_name_hierarchy} def update_network_details(): - nonlocal ip_address - mgmt_ip_to_instance_id_map = self.get_network_device_id(ip_address, hostname) + # nonlocal ip_address + mgmt_ip_to_instance_id_map = self.get_network_device_id(ip_address[0], hostname) network_device_id = list(mgmt_ip_to_instance_id_map.values())[0] - ip_address = list(mgmt_ip_to_instance_id_map.keys())[0] - self.validate_device_in_fabric(ip_address) + ip_address[0] = list(mgmt_ip_to_instance_id_map.keys())[0] + self.validate_device_in_fabric(ip_address[0]) have.update({ "mgmt_ip_to_instance_id_map": mgmt_ip_to_instance_id_map, - "ip_address": ip_address, + "ip_address": ip_address[0], "network_device_id": network_device_id, "get_port_assignments_params": self.get_port_assignments_params(network_device_id, fabric_id), "get_port_channels_params": self.get_port_channels_params(network_device_id, fabric_id) diff --git a/plugins/modules/swim_workflow_manager.py b/plugins/modules/swim_workflow_manager.py index 1b85b80390..df7ddc0673 100644 --- a/plugins/modules/swim_workflow_manager.py +++ b/plugins/modules/swim_workflow_manager.py @@ -937,62 +937,60 @@ def get_device_uuids(self, site_name, device_family, device_role, 
device_series_ for item_dict in item['response']: site_response_list.append(item_dict) else: - try: - response = self.dnac._exec( - family="site_design", - function='get_site_assigned_network_devices', - op_modifies=True, - params={"site_id": site_id}, - ) - self.log("Received API response from 'get_site_assigned_network_devices': {0}".format(str(response)), "DEBUG") - response = response.get('response') + site_names = site_name + ".*" + get_site_names = self.get_site(site_names) + self.log("Fetched site names: {0}".format(str(get_site_names)), "DEBUG") + + site_info = {} + + for item in get_site_names['response']: + if 'nameHierarchy' in item and 'id' in item: + site_info[item['nameHierarchy']] = item['id'] + + for site_name, site_id in site_info.items(): + try: + response = self.dnac._exec( + family="site_design", + function='get_site_assigned_network_devices', + params={"site_id": site_id}, + ) + self.log("Received API response from 'get_site_assigned_network_devices': {0}".format(str(response)), "DEBUG") + devices = response.get('response') + if not devices: + self.log("No devices found for site - '{0}'.". format(site_name), "WARNING") + continue - if not response: - self.log("No devices found for site '{0}'.". 
format(site_name), "WARNING") + for device_id in devices: + device_id_list.append(device_id.get("deviceId")) - except Exception as e: - self.log("Unable to fetch the device(s) associated to the site '{0}' due to '{1}'".format(site_name, str(e)), "WARNING") - return device_uuid_list + except Exception as e: + self.log("Unable to fetch the device(s) associated to the site '{0}' due to '{1}'".format(site_name, str(e)), "WARNING") + return device_uuid_list - for device_id in response: - device_id_list.append(device_id.get("deviceId")) + for device_id in device_id_list: + self.log("Processing device_id: {0}".format(device_id)) + try: + device_list_response = self.dnac._exec( + family="devices", + function="get_device_list", + params={"id": device_id}, + ) - try: - device_params = {} - offset = 0 - limit = self.get_device_details_limit() - initial_exec = False + self.log("Received API response from 'get_device_list': {0}".format(str(device_list_response)), "DEBUG") - while True: - if initial_exec: - device_params["limit"] = limit - device_params["offset"] = offset * limit - device_list_response = self.dnac._exec( - family="devices", - function='get_device_list', - params=device_params - ) - else: - initial_exec = True - device_list_response = self.dnac._exec( - family="devices", - function='get_device_list', - op_modifies=True - ) - offset = offset + 1 - self.log("Received API response from 'device_list_response': {0}".format(str(device_list_response)), "DEBUG") - device_response = device_list_response.get('response') + device_response = device_list_response.get("response") if not device_response: - break + self.log("No device data found for device_id: {0}".format(device_id), "INFO") + continue for device in device_response: if device.get("instanceUuid") in device_id_list: if device_family is None or device.get("family") == device_family: site_response_list.append(device) - except Exception as e: - self.log("Unable to fetch the device(s) associated to the site '{0}' due 
to '{1}'".format(site_name, str(e)), "WARNING") - return device_uuid_list + except Exception as e: + self.log("Unable to fetch devices for site '{0}' due to: {1}".format(site_name, str(e)), "WARNING") + return device_uuid_list self.device_ips = [] for item in site_response_list: @@ -1034,7 +1032,7 @@ def get_device_uuids(self, site_name, device_family, device_role, device_series_ offset = offset + 1 device_response = device_list_response.get('response') - if not response or not device_response: + if not device_response: self.log("Failed to retrieve devices associated with the site '{0}' due to empty API response.".format(site_name), "INFO") break diff --git a/plugins/modules/template_workflow_manager.py b/plugins/modules/template_workflow_manager.py index b5fc0b1edf..9d7565c431 100644 --- a/plugins/modules/template_workflow_manager.py +++ b/plugins/modules/template_workflow_manager.py @@ -844,8 +844,7 @@ device_details: description: Details specific to devices where the template will be deployed, including lists of device IPs, hostnames, serial numbers, or MAC addresses. - type: list - elements: dict + type: dict suboptions: device_ips: description: A list of IP addresses of the devices where the template will be deployed. 
@@ -1078,7 +1077,7 @@ - param_name: "vlan_name" param_value: "testvlan31" device_details: - - device_ips: ["10.1.2.1", "10.2.3.4"] + device_ips: ["10.1.2.1", "10.2.3.4"] - name: Delete the given project or template from the Cisco Catalyst Center cisco.dnac.template_workflow_manager: @@ -1270,8 +1269,7 @@ def validate_input(self): 'param_value': {'type': 'str'}, }, 'device_details': { - 'type': 'list', - 'elements': 'dict', + 'type': 'dict', 'device_ips': {'type': 'list', 'elements': 'str'}, 'device_hostnames': {'type': 'list', 'elements': 'str'}, 'serial_numbers': {'type': 'list', 'elements': 'str'}, @@ -2420,7 +2418,7 @@ def get_export_template_values(self, export_values): all_project_details = self.dnac._exec( family="configuration_templates", - function='get_projects_details' + function='get_projects_details_v2' ) all_project_details = all_project_details.get("response") for values in export_values: diff --git a/tests/integration/ccc_template_management/defaults/main.yml b/tests/integration/ccc_template_management/defaults/main.yml new file mode 100644 index 0000000000..55a93fc23d --- /dev/null +++ b/tests/integration/ccc_template_management/defaults/main.yml @@ -0,0 +1,2 @@ +--- +testcase: "*" \ No newline at end of file diff --git a/tests/integration/ccc_template_management/meta/main.yml b/tests/integration/ccc_template_management/meta/main.yml new file mode 100644 index 0000000000..5514b6a40c --- /dev/null +++ b/tests/integration/ccc_template_management/meta/main.yml @@ -0,0 +1 @@ +dependencies: [] \ No newline at end of file diff --git a/tests/integration/ccc_template_management/tasks/main.yml b/tests/integration/ccc_template_management/tasks/main.yml new file mode 100644 index 0000000000..09e0832ca2 --- /dev/null +++ b/tests/integration/ccc_template_management/tasks/main.yml @@ -0,0 +1,34 @@ +--- +- name: collect ccc test cases + find: + paths: "{{ role_path }}/tests" + patterns: "{{ testcase }}.yml" + connection: local + register: ccc_cases + tags: sanity 
+ +- debug: + msg: "CCC Cases: {{ ccc_cases }}" + +- set_fact: + test_cases: + files: "{{ ccc_cases.files }}" + tags: sanity + +- debug: + msg: "Test Cases: {{ test_cases }}" + +- name: set test_items + set_fact: + test_items: "{{ test_cases.files | map(attribute='path') | list }}" + tags: sanity + +- debug: + msg: "Test Items: {{ test_items }}" + +- name: run test cases (connection=httpapi) + include_tasks: "{{ test_case_to_run }}" + loop: "{{ test_items }}" + loop_control: + loop_var: test_case_to_run + tags: sanity \ No newline at end of file diff --git a/tests/integration/ccc_template_management/tests/10.195.243.53_1.8.212_Tue Jul 02 2024_templates.json b/tests/integration/ccc_template_management/tests/10.195.243.53_1.8.212_Tue Jul 02 2024_templates.json new file mode 100644 index 0000000000..7b7e524810 --- /dev/null +++ b/tests/integration/ccc_template_management/tests/10.195.243.53_1.8.212_Tue Jul 02 2024_templates.json @@ -0,0 +1,35 @@ +[ + { + "name": "test_template", + "tags": [], + "author": "admin", + "deviceTypes": [ + { + "productFamily": "Switches and Hubs" + } + ], + "softwareType": "IOS", + "softwareVariant": "XE", + "templateContent": "hostname cat9k-1\\n", + "templateParams": [], + "rollbackTemplateParams": [], + "composite": false, + "containingTemplates": [], + "language": "VELOCITY", + "promotedTemplateContent": "hostname cat9k-1\\n", + "promotedTemplateParams": [], + "customParamsOrder": false, + "createTime": 1719914895959, + "lastUpdateTime": 1719914895959, + "latestVersionTime": 1719914896486, + "validationErrors": { + "templateErrors": [], + "rollbackTemplateErrors": [], + "templateId": "10a9a332-44f8-419e-8274-45ad74a38e9b", + "templateVersion": null + }, + "noOfConflicts": 0, + "projectAssociated": true, + "documentDatabase": false + } +] \ No newline at end of file diff --git a/tests/integration/ccc_template_management/tests/test_export_template_and_project.yml 
b/tests/integration/ccc_template_management/tests/test_export_template_and_project.yml new file mode 100644 index 0000000000..43d242840e --- /dev/null +++ b/tests/integration/ccc_template_management/tests/test_export_template_and_project.yml @@ -0,0 +1,112 @@ +--- +- debug: msg="Starting export template and project test" +- debug: msg="Role Path {{ role_path }}" + +- block: + - name: Load vars and declare dnac vars + include_vars: + file: "{{ role_path }}/vars/vars_export_template_and_project.yml" + name: vars_map + vars: + dnac_login: &dnac_login + dnac_host: "{{ dnac_host }}" + dnac_username: "{{ dnac_username }}" + dnac_password: "{{ dnac_password }}" + dnac_verify: "{{ dnac_verify }}" + dnac_port: "{{ dnac_port }}" + dnac_version: "{{ dnac_version }}" + dnac_debug: "{{ dnac_debug }}" + dnac_log: true + dnac_log_level: DEBUG + config_verify: true + + # - debug: + # msg: "{{ vars_map.create_template_details }}" + + # - debug: + # msg: "{{ vars_map.export_project_details }}" + + # - debug: + # msg: "{{ vars_map.export_template_details }}" + +############################################# +# Pre Test Setup # +############################################# + + - name: Create template before test + cisco.dnac.template_workflow_manager: + <<: *dnac_login + state: merged + config: + - "{{ item }}" + loop: "{{ vars_map.template_details }}" + +############################################# +# EXPORT PROJECT # +############################################# + + - name: Export project from export_project_details + cisco.dnac.template_workflow_manager: + <<: *dnac_login + state: merged + config: + - "{{ item }}" + register: result_export_project + loop: "{{ vars_map.export_project_details }}" + tags: merged + + # - name: Debug item + # debug: + # var: item + # loop: "{{ result_export_project.results }}" + # when: result_export_project is defined + + - name: Assert export of project + assert: + that: + - item.changed == true + loop: "{{ result_export_project.results }}" + 
when: result_export_project is defined + +############################################# +# EXPORT TEMPLATE # +############################################# + + - name: Export template from export_template_details + cisco.dnac.template_workflow_manager: + <<: *dnac_login + state: merged + config: + - "{{ item }}" + register: result_export_template + loop: "{{ vars_map.export_template_details }}" + tags: merged + + # - name: Debug item + # debug: + # var: item + # loop: "{{ result_export_template.results }}" + # when: result_export_template is defined + + - name: Assert export of template + assert: + that: + - item.changed == true + loop: "{{ result_export_template.results }}" + when: result_export_template is defined + +############################################# +# POST TEST CLEAN UP # +############################################# + + # - name: Pause for 10 seconds after each updation + # pause: + # seconds: 10 + + - name: Delete template after test + cisco.dnac.template_workflow_manager: + <<: *dnac_login + state: deleted + config: + - "{{ item }}" + loop: "{{ vars_map.template_details }}" diff --git a/tests/integration/ccc_template_management/tests/test_import_template_and_project.yml b/tests/integration/ccc_template_management/tests/test_import_template_and_project.yml new file mode 100644 index 0000000000..74b0300840 --- /dev/null +++ b/tests/integration/ccc_template_management/tests/test_import_template_and_project.yml @@ -0,0 +1,129 @@ +--- +- debug: msg="Starting import template and project test" +- debug: msg="Role Path {{ role_path }}" + +- block: + - name: Load vars and declare dnac vars + include_vars: + file: "{{ role_path }}/vars/vars_import_template_and_project.yml" + name: vars_map + vars: + dnac_login: &dnac_login + dnac_host: "{{ dnac_host }}" + dnac_username: "{{ dnac_username }}" + dnac_password: "{{ dnac_password }}" + dnac_verify: "{{ dnac_verify }}" + dnac_port: "{{ dnac_port }}" + dnac_version: "{{ dnac_version }}" + dnac_debug: "{{ 
dnac_debug }}" + dnac_log: true + dnac_log_level: DEBUG + config_verify: true + + # - debug: + # msg: "{{ vars_map.template_details }}" + + # - debug: + # msg: "{{ vars_map.project_details }}" + + # - debug: + # msg: "{{ vars_map.import_template_details }}" + + # - debug: + # msg: "{{ vars_map.import_project_details }}" + +############################################# +# Pre Tests Clean Up # +############################################# + + - name: Clean up template before test + cisco.dnac.template_workflow_manager: + <<: *dnac_login + state: deleted + config: + - "{{ item }}" + loop: "{{ vars_map.template_details }}" + + # - name: Clean up project before test + # cisco.dnac.template_workflow_manager: + # <<: *dnac_login + # state: deleted + # config: + # - "{{ item }}" + # loop: "{{ vars_map.project_details }}" + +# ############################################# +# # IMPORT PROJECT # +# ############################################# + +# - name: Import project from import_project_details +# cisco.dnac.template_workflow_manager: +# <<: *dnac_login +# state: merged +# config: +# - "{{ item }}" +# register: result_import_project +# loop: "{{ vars_map.import_project_details }}" +# tags: merged + +# # - name: Debug item +# # debug: +# # var: item +# # loop: "{{ result_import_project.results }}" +# # when: result_import_project is defined + +# - name: Assert import project +# assert: +# that: +# - item.changed == true +# - "'imported the project' in item.response[2].import.response.importProject" +# loop: "{{ result_import_project.results }}" +# when: result_import_project is defined + +############################################# +# IMPORT TEMPLATE # +############################################# + + - name: Import template from import_template_details + cisco.dnac.template_workflow_manager: + <<: *dnac_login + state: merged + config: + - "{{ item }}" + register: result_import_template + loop: "{{ vars_map.import_template_details }}" + tags: merged + + # - name: Debug 
item + # debug: + # var: item + # loop: "{{ result_import_template.results }}" + # when: result_import_template is defined + + - name: Assert import template + assert: + that: + - item.changed == true + - "'imported the templates' in item.response[2].import.response.importTemplate" + loop: "{{ result_import_template.results }}" + when: result_import_template is defined + +############################################# +# Post Test Clean Up # +############################################# + + - name: Clean up template after test + cisco.dnac.template_workflow_manager: + <<: *dnac_login + state: deleted + config: + - "{{ item }}" + loop: "{{ vars_map.template_details }}" + + # - name: Clean up project after test + # cisco.dnac.template_workflow_manager: + # <<: *dnac_login + # state: deleted + # config: + # - "{{ item }}" + # loop: "{{ vars_map.project_details }}" diff --git a/tests/integration/ccc_template_management/tests/test_template_management.yml b/tests/integration/ccc_template_management/tests/test_template_management.yml new file mode 100644 index 0000000000..e901ecfada --- /dev/null +++ b/tests/integration/ccc_template_management/tests/test_template_management.yml @@ -0,0 +1,97 @@ +--- +- debug: msg="Starting template management test" +- debug: msg="Role Path {{ role_path }}" + +- block: + - name: Load vars and declare dnac vars + include_vars: + file: "{{ role_path }}/vars/vars_template_management.yml" + name: vars_map + vars: + dnac_login: &dnac_login + dnac_host: "{{ dnac_host }}" + dnac_username: "{{ dnac_username }}" + dnac_password: "{{ dnac_password }}" + dnac_verify: "{{ dnac_verify }}" + dnac_port: "{{ dnac_port }}" + dnac_version: "{{ dnac_version }}" + dnac_debug: "{{ dnac_debug }}" + dnac_log: true + dnac_log_level: DEBUG + + # - debug: + # msg: "{{ vars_map.template_details }}" + +############################################# +# Pre Tests Clean Up # +############################################# + + - name: Clean up template before test + 
cisco.dnac.template_workflow_manager: + <<: *dnac_login + state: deleted + config: + - "{{ item }}" + loop: "{{ vars_map.template_details }}" + + # - name: Pause for 10 seconds after each updation + # pause: + # seconds: 10 + +############################################# +# CREATE TEMPLATE # +############################################# + + - name: Create template from template_details + cisco.dnac.template_workflow_manager: + <<: *dnac_login + state: merged + config: + - "{{ item }}" + register: result_create_template + loop: "{{ vars_map.template_details }}" + tags: merged + + # - name: Debug item + # debug: + # var: item + # loop: "{{ result_create_template.results }}" + # when: result_create_template is defined + + - name: Assert template creation + assert: + that: + - item.changed == true + loop: "{{ result_create_template.results }}" + when: result_create_template is defined + +############################################# +# DELETE TEMPLATE # +############################################# + + # - name: Pause for 10 seconds after each updation + # pause: + # seconds: 10 + + - name: Delete template from template_details + cisco.dnac.template_workflow_manager: + <<: *dnac_login + state: deleted + config: + - "{{ item }}" + loop: "{{ vars_map.template_details }}" + register: result_delete_template + tags: deleted + + # - name: Debug item + # debug: + # var: item + # loop: "{{ result_delete_template.results }}" + # when: result_delete_template is defined + + - name: Assert deletion of template + assert: + that: + - item.changed == true + loop: "{{ result_delete_template.results }}" + when: result_delete_template is defined diff --git a/tests/integration/ccc_template_management/vars/vars_export_template_and_project.yml b/tests/integration/ccc_template_management/vars/vars_export_template_and_project.yml new file mode 100644 index 0000000000..ba7581a434 --- /dev/null +++ b/tests/integration/ccc_template_management/vars/vars_export_template_and_project.yml @@ -0,0 
+1,23 @@ +--- +template_details: + - configuration_templates: + project_name: 'Onboarding Configuration' + template_content: 'hostname cat9k-1\n' + language: 'velocity' + device_types: + - product_family: 'Switches and Hubs' + software_type: 'IOS-XE' + variant: 'XE' + template_name: 'test_template' + version_description: 'Test Template' + +export_project_details: + - export: + project: + - Onboarding Configuration + +export_template_details: + - export: + template: + - project_name: Onboarding Configuration + template_name: test_template \ No newline at end of file diff --git a/tests/integration/ccc_template_management/vars/vars_import_template_and_project.yml b/tests/integration/ccc_template_management/vars/vars_import_template_and_project.yml new file mode 100644 index 0000000000..128e17dd9a --- /dev/null +++ b/tests/integration/ccc_template_management/vars/vars_import_template_and_project.yml @@ -0,0 +1,29 @@ +--- +template_details: + - configuration_templates: + project_name: 'Onboarding Configuration' + template_content: 'hostname cat9k-1\n' + language: 'velocity' + device_types: + - product_family: 'Switches and Hubs' + software_type: 'IOS-XE' + variant: 'XE' + template_name: 'test_template' + version_description: 'Test Template' + +project_details: + - project_name: 'IT Test Project' + +import_template_details: + - import: + template: + do_version: false + project_name: 'Onboarding Configuration' + template_file: "{{ role_path }}/tests/10.195.243.53_1.8.212_Tue Jul 02 2024_templates.json" + +import_project_details: + - import: + project: + do_version: false + payload: + - name: 'IT Test Project' diff --git a/tests/integration/ccc_template_management/vars/vars_template_management.yml b/tests/integration/ccc_template_management/vars/vars_template_management.yml new file mode 100644 index 0000000000..6dcb1f7e80 --- /dev/null +++ 
b/tests/integration/ccc_template_management/vars/vars_template_management.yml @@ -0,0 +1,12 @@ +--- +template_details: + - configuration_templates: + project_name: 'Onboarding Configuration' + template_content: 'hostname cat9k-1\n' + language: 'velocity' + device_types: + - product_family: 'Switches and Hubs' + software_type: 'IOS-XE' + variant: 'XE' + template_name: 'test_template' + version_description: 'Test Template'