From aa746eb8d9bd789bc4b72d2f6dd84192af97d5fa Mon Sep 17 00:00:00 2001 From: md-rafeek Date: Wed, 23 Oct 2024 08:46:51 +0530 Subject: [PATCH 01/83] AP bulk updated functionality enabled --- .../modules/accesspoint_workflow_manager.py | 244 +++++++++++++++--- 1 file changed, 202 insertions(+), 42 deletions(-) diff --git a/plugins/modules/accesspoint_workflow_manager.py b/plugins/modules/accesspoint_workflow_manager.py index 96431dfee..d5fb78c29 100644 --- a/plugins/modules/accesspoint_workflow_manager.py +++ b/plugins/modules/accesspoint_workflow_manager.py @@ -1135,7 +1135,11 @@ def __init__(self, module): self.supported_states = ["merged"] self.payload = module.params self.payload["consolidated_result"] = [] - self.keymap = {} + self.keymap = { + "mac_addresses": "mac_address", + "hostnames": "hostname", + "management_ip_addresses": "management_ip_address" + } self.radio_interface = ["6ghz_radio", "xor_radio", "tri_radio"] self.allowed_series = { "6ghz_radio": ["9136I", "9162I", "9163E", "9164I", "IW9167IH", "9178I", "9176I", @@ -1210,6 +1214,7 @@ def validate_input_yml(self): "tri_radio": {"required": False, "type": "dict"}, "reboot_aps": {"required": False, "type": "dict"}, "factory_reset_aps": {"required": False, "type": "dict"}, + "bulk_update_aps": {"required": False, "type": "dict"}, "ap_selected_fields": {"required": False, "type": "str"}, "ap_config_selected_fields": {"required": False, "type": "str"} } @@ -1648,30 +1653,7 @@ def validate_ap_config_parameters(self, ap_config): on it. If validation succeeds, "self.status" will be "success". If it fails, "self.status" will be "failed", and "self.msg" will describe the validation issues. """ - errormsg = [] - invalid_series = self.validate_radio_series(ap_config) - - if invalid_series: - errormsg.append(invalid_series) - - mac_address = ap_config.get("mac_address") - if mac_address: - mac_regex = re.compile(r'^([0-9A-Fa-f]{2}[:-]){5}([0-9A-Fa-f]{2})$') - if not mac_regex.match(mac_address): - errormsg.append("mac_address: Invalid MAC Address '{0}' in playbook.".format( - mac_address)) - - management_ip_address = ap_config.get("management_ip_address") - if management_ip_address and (not self.is_valid_ipv4(management_ip_address) and - not self.is_valid_ipv6(management_ip_address)): - errormsg.append("management_ip_address: Invalid Management IP Address '{0}'\ - in playbook.".format(management_ip_address)) - - if ap_config.get("rf_profile"): - param_spec = dict(type="str", length_max=32) - validate_str(ap_config["rf_profile"], param_spec, "rf_profile", errormsg) - for reboot_reset in ("reboot_aps", "factory_reset_aps"): reboot_reset_aps = ap_config.get(reboot_reset) if reboot_reset_aps is not None: @@ -1694,6 +1676,59 @@ def validate_ap_config_parameters(self, ap_config): validate_str(ap_identifier, param_spec, "hostnames", errormsg) self.log("Hostname validation for '{0}' in {1} completed.".format(ap_identifier, reboot_reset), "DEBUG") + ap_identifier = ap_config.get("ap_identifier") + common_fields_to_change = ap_config.get("common_fields_to_change") + if ap_identifier is not None: + for each_ap in ap_identifier: + mac_address = each_ap.get("mac_address") + if mac_address: + mac_regex = re.compile(r'^([0-9A-Fa-f]{2}[:-]){5}([0-9A-Fa-f]{2})$') + if not mac_regex.match(mac_address): + errormsg.append("mac_address: Invalid MAC Address '{0}' in playbook.".format(mac_address)) + + management_ip_address = each_ap.get("management_ip_address") + if management_ip_address and (not self.is_valid_ipv4(management_ip_address) and + not 
self.is_valid_ipv6(management_ip_address)): + errormsg.append("management_ip_address: Invalid Management IP Address '{0}' in playbook." + .format(management_ip_address)) + + hostname = each_ap.get("hostname") + if hostname: + param_spec = dict(type="str", length_max=32) + validate_str(hostname, param_spec, "hostname", errormsg) + self.log("Hostname validation for '{0}' completed.".format(hostname), "INFO") + + ap_name = each_ap.get("ap_name") + if ap_name: + param_spec = dict(type="str", length_max=32) + validate_str(ap_name, param_spec, "ap_name", errormsg) + if re.search(r'[ ?<]', ap_name): + errormsg.append("ap_name: Invalid '{0}' in playbook. Space, '?', '<' and XSS characters are not allowed".format(ap_name)) + + if common_fields_to_change is not None: + ap_config = common_fields_to_change + + invalid_series = self.validate_radio_series(ap_config) + if invalid_series: + errormsg.append(invalid_series) + + mac_address = ap_config.get("mac_address") + if mac_address: + mac_regex = re.compile(r'^([0-9A-Fa-f]{2}[:-]){5}([0-9A-Fa-f]{2})$') + if not mac_regex.match(mac_address): + errormsg.append("mac_address: Invalid MAC Address '{0}' in playbook.".format( + mac_address)) + + management_ip_address = ap_config.get("management_ip_address") + if management_ip_address and (not self.is_valid_ipv4(management_ip_address) and + not self.is_valid_ipv6(management_ip_address)): + errormsg.append("management_ip_address: Invalid Management IP Address '{0}'\ + in playbook.".format(management_ip_address)) + + if ap_config.get("rf_profile"): + param_spec = dict(type="str", length_max=32) + validate_str(ap_config["rf_profile"], param_spec, "rf_profile", errormsg) + site = ap_config.get("site") if site: floor = site.get("floor") @@ -2040,6 +2075,18 @@ def get_accesspoint_details(self, input_config): input_param[self.keymap[key]] = input_config[key] break + if input_config.get("ap_identifier"): + ap_list = [] + selected_key = None + for each_ap in input_config.get("ap_identifier"): + for key in ["mac_address", "management_ip_address", "hostname"]: + if each_ap.get(key): + ap_list.append(each_ap[key]) + selected_key = key + break + input_param[self.keymap[selected_key]] = ap_list + self.log("At AP details {0}".format(self.pprint(input_param))) + if not input_param: msg = "Required param of mac_address,ip_address or hostname is not in playbook config" self.log(msg, "WARNING") @@ -2054,10 +2101,13 @@ def get_accesspoint_details(self, input_config): params=input_param, ) - if ap_response and ap_response.get("response"): + if ap_response and ap_response.get("response") and not input_config.get("ap_identifier"): ap_response = self.camel_to_snake_case(ap_response["response"]) accesspoint_exists = True current_configuration = ap_response[0] + elif ap_response and ap_response.get("response") and input_config.get("ap_identifier"): + ap_response = self.camel_to_snake_case(ap_response["response"]) + accesspoint_exists = True except Exception as e: self.msg = "The provided device '{0}' is either invalid or not present in the \ @@ -2069,9 +2119,19 @@ def get_accesspoint_details(self, input_config): Cisco Catalyst Center.".format(str(input_param)) self.module.fail_json(msg="MAC Address not exist:", response=str(self.msg)) else: - if current_configuration["family"] != "Unified AP": + if not input_config.get("ap_identifier") and current_configuration["family"] != "Unified AP": self.msg = "Provided device is not Access Point." 
self.module.fail_json(msg="MAC Address is not Access point") + elif input_config.get("ap_identifier"): + filter_response = [] + for each_response in ap_response: + if each_response["family"] != "Unified AP": + self.msg = "Provided device {0} is not Access Point.".format(each_response["mac_address"]) + self.log(self.msg, "WARNING") + else: + filter_response.append(each_response) + self.log("Filtered Access points List: {0} ".format(self.pprint(filter_response)), "INFO") + return accesspoint_exists, filter_response return accesspoint_exists, current_configuration @@ -2815,16 +2875,22 @@ def update_ap_configuration(self, ap_config): ap_config["adminStatus"] = True \ if ap_config["adminStatus"] == "Enabled" else False - if ap_config.get(self.keymap["ap_name"]) is not None: - temp_dict[self.keymap["ap_name"]] = ap_config.get(self.keymap["ap_name"]) - temp_dict["apNameNew"] = ap_config["apNameNew"] - temp_dict[self.keymap["mac_address"]] = ap_config[self.keymap["mac_address"]] - del ap_config[self.keymap["ap_name"]] - del ap_config["apNameNew"] - elif ap_config.get(self.keymap["mac_address"]) is not None: - temp_dict[self.keymap["mac_address"]] = ap_config.get(self.keymap["mac_address"]) - - ap_config["apList"].append(temp_dict) + if not ap_config.get("bulk_update"): + if ap_config.get(self.keymap["ap_name"]) is not None: + temp_dict[self.keymap["ap_name"]] = ap_config.get(self.keymap["ap_name"]) + temp_dict["apNameNew"] = ap_config["apNameNew"] + temp_dict[self.keymap["mac_address"]] = ap_config[self.keymap["mac_address"]] + del ap_config[self.keymap["ap_name"]] + del ap_config["apNameNew"] + elif ap_config.get(self.keymap["mac_address"]) is not None: + temp_dict[self.keymap["mac_address"]] = ap_config.get(self.keymap["mac_address"]) + ap_config["apList"].append(temp_dict) + else: + ap_config["apList"] = ap_config.get("ap_list") + del ap_config["ap_list"] + if ap_config.get(self.keymap["ap_name"]) and ap_config.get("apNameNew"): + del ap_config[self.keymap["ap_name"]] + del ap_config["apNameNew"] if ap_config.get(self.keymap["location"]) is not None: ap_config["configureLocation"] = True @@ -2994,7 +3060,10 @@ def update_ap_configuration(self, ap_config): response = response.get("response") self.log("Response of Access Point Configuration: {0}".format( self.pprint(response)), "INFO") - return dict(mac_address=self.have["mac_address"], response=response) + if not ap_config.get("bulk_update"): + return dict(mac_address=self.have["mac_address"], response=response) + else: + return dict(response=response) except Exception as e: self.log("AP config update Error: {0} {1}".format(self.pprint(ap_config), str(e)), @@ -3307,11 +3376,6 @@ def reboot_factory_reset_function(self, ap_list, reboot_or_reset): Returns: dict: A dictionary containing the result of the access point reset/reboot status. 
""" - self.keymap = { - "mac_addresses": "mac_address", - "hostnames": "hostname", - "management_ip_addresses": "management_ip_address" - } ap_indentity = list(ap_list.keys())[0] if ap_indentity and ap_indentity in self.keymap and len(ap_list.get(ap_indentity)) > 0: @@ -3321,6 +3385,7 @@ def reboot_factory_reset_function(self, ap_list, reboot_or_reset): self.log("{0}: {1}".format(reboot_or_reset, str(ap_indentity_param)), "INFO") ap_exist, ap_details = self.get_accesspoint_details(ap_indentity_param) eth_mac_list.append(ap_details.get("ap_ethernet_mac_address")) + if eth_mac_list: self.log("Ethernet MAC addresses to {0}: {1}".format(reboot_or_reset, eth_mac_list), "INFO") if reboot_or_reset == "reboot_aps": @@ -3332,6 +3397,92 @@ def reboot_factory_reset_function(self, ap_list, reboot_or_reset): return self + def bulk_ap_update(self, bulk_config): + """ + Access Point bulk update common field to update to all mentioned AP list. + + Parameters: + self (dict): A dictionary used to collect the execution results. + ap_list (list): A list containing the APs mac address which need to reset or reboot. + + Returns: + dict: A dictionary containing the result of the access point reset/reboot status. + """ + ap_exist, ap_details = self.get_accesspoint_details(bulk_config) + self.payload["access_point_details"] = ap_details + ap_update_list = [] + common_config = {} + if ap_exist and len(ap_details) > 0: + for each_ap in ap_details: + ap_config_exists, ap_configuration = self.get_accesspoint_config(each_ap["ap_ethernet_mac_address"]) + self.log("Access point configuration exists: {0}, Current configuration: {1}" + .format(ap_config_exists, self.pprint(ap_configuration)), "INFO") + self.want = bulk_config.get("common_fields_to_change") + self.want["mac_address"] = each_ap["mac_address"] + ap_name = [ap.get('ap_name') for ap in bulk_config.get("ap_identifier") + if (each_ap["mac_address"] == ap.get('mac_address') or + each_ap["hostname"] == ap.get('hostname') or + each_ap["management_ip_address"] == ap.get('management_ip_address'))] + self.want["ap_name"] = ap_name[0] + self.log("Access point WANT configuration exists: {0}, Current configuration: {1}" + .format(ap_config_exists, self.pprint(self.want)), "INFO") + consolidated_config = self.config_diff(ap_configuration) + + temp_dict = {} + if consolidated_config.get(self.keymap["ap_name"]) is not None: + temp_dict[self.keymap["ap_name"]] = consolidated_config.get(self.keymap["ap_name"]) + temp_dict["apNameNew"] = consolidated_config["apNameNew"] + temp_dict[self.keymap["mac_address"]] = consolidated_config[self.keymap["mac_address"]] + elif consolidated_config.get(self.keymap["mac_address"]) is not None: + temp_dict[self.keymap["mac_address"]] = consolidated_config.get(self.keymap["mac_address"]) + ap_update_list.append(temp_dict) + common_config.update(consolidated_config) + + common_config["bulk_update"] = True + common_config["ap_list"] = ap_update_list + + task_response = self.update_ap_configuration(common_config) + self.log("Access Point update response: {0} .".format(task_response), "INFO") + responses = {} + if task_response and isinstance(task_response, dict): + resync_retry_count = self.payload.get("dnac_api_task_timeout") + resync_retry_interval = self.payload.get("dnac_task_poll_interval") + while resync_retry_count: + task_details_response = self.get_tasks_by_id( + task_response["response"]["taskId"]) + self.log("Status of the task: {0} .".format(self.status), "INFO") + + if task_details_response.get("endTime") is not None: + if 
task_details_response.get("status") == "FAILURE": + self.result["changed"] = True if self.result["changed"] is True else False + self.status = "failed" + self.msg = "Unable to get success response, hence AP config not updated" + self.log(self.msg, "ERROR") + self.log("Task Details: {0} .".format(self.pprint(task_details_response)), "ERROR") + responses["accesspoints_updates"] = { + "ap_update_config_task_details": self.get_task_details_by_id(task_response["response"]["taskId"]), + "ap_config_update_status": self.msg} + self.module.fail_json(msg=self.msg, response=responses) + else: + self.result["changed"] = True + self.result["ap_update_status"] = True + self.log("Task Details: {0} .".format(self.pprint( + task_details_response)), "INFO") + self.msg = "AP Configuration - {0} updated Successfully" + self.log(self.msg, "INFO") + responses["accesspoints_updates"] = { + "ap_update_config_task_details": self.get_task_details_by_id(task_details_response["id"]), + "ap_config_update_status": self.msg + } + self.result["ap_update_msg"] = self.msg + break + + time.sleep(resync_retry_interval) + resync_retry_count = resync_retry_count - 1 + + self.result["response"] = responses + return self + def main(): """ main entry point for module execution @@ -3389,6 +3540,15 @@ def main(): ccc_network.reboot_factory_reset_function(ap_list, reboot_reset) module.exit_json(**ccc_network.result) + + bulk_updates = ccc_network.validated_config[0].get("bulk_update_aps") + if bulk_updates is not None: + ccc_network.log("Bulk List: {0}".format(ccc_network.pprint(bulk_updates)), "INFO") + ccc_network.reset_values() + ccc_network.validate_ap_config_parameters(bulk_updates).check_return_status() + ccc_network.bulk_ap_update(bulk_updates) + module.exit_json(**ccc_network.result) + for config in ccc_network.validated_config: ccc_network.reset_values() ccc_network.get_want(config).check_return_status() From 777034779f0fa9b4e40455bcef2d2ff3d44d8320 Mon Sep 17 00:00:00 2001 From: Syed-khadeerahmed Date: Wed, 23 Oct 2024 15:20:17 +0530 Subject: [PATCH 02/83] coding for 2 bugs in progress --- playbooks/swim_workflow_manager.yml | 33 +--- plugins/modules/swim_workflow_manager.py | 187 ++++++++++++++--------- 2 files changed, 117 insertions(+), 103 deletions(-) diff --git a/playbooks/swim_workflow_manager.yml b/playbooks/swim_workflow_manager.yml index 644151add..024689309 100644 --- a/playbooks/swim_workflow_manager.yml +++ b/playbooks/swim_workflow_manager.yml @@ -22,34 +22,9 @@ dnac_api_task_timeout: 1000 dnac_task_poll_interval: 1 config: - - import_image_details: - type: remote - url_details: - payload: - - source_url: - - "http://172.21.236.183/swim/V1712_2_CCO/cat9k_iosxe.17.12.02.SPA.bin" - third_party: False - tagging_details: - image_name: cat9k_iosxe.17.12.02.SPA.bin - device_role: ALL + - tagging_details: + image_name: cat9k_iosxe.17.15.01.SPA.bin + device_role: [Distribution] device_image_family_name: Cisco Catalyst 9300 Switch - site_name: "{{item.site_name}}" + # site_name: Global/Chennai/LTTS/FLOOR1 tagging: True - # image_distribution_details: - # image_name: cat9k_iosxe.17.12.02.SPA.bin - # site_name: "{{item.site_name}}" - # device_role: "{{ item.device_role }}" - # device_family_name: "{{ item.device_family_name }}" - # device_series_name: "Catalyst 9300 Series" - image_activation_details: - image_name: cat9k_iosxe.17.12.02.SPA.bin - site_name: "{{item.site_name}}" - device_role: "{{ item.device_role }}" - device_family_name: "{{ item.device_family_name }}" - device_series_name: "Catalyst 9300 Series" - 
scehdule_validate: False - distribute_if_needed: True - - with_items: "{{ image_details }}" - tags: - - swim diff --git a/plugins/modules/swim_workflow_manager.py b/plugins/modules/swim_workflow_manager.py index 182f79b4a..849e25190 100644 --- a/plugins/modules/swim_workflow_manager.py +++ b/plugins/modules/swim_workflow_manager.py @@ -1521,59 +1521,86 @@ def get_diff_tagging(self): tagging_details = self.want.get("tagging_details") tag_image_golden = tagging_details.get("tagging") image_name = self.get_image_name_from_id(self.have.get("tagging_image_id")) - - image_params = dict( - image_id=self.have.get("tagging_image_id"), - site_id=self.have.get("site_id"), - device_family_identifier=self.have.get("device_family_identifier"), - device_role=tagging_details.get("device_role", "ALL").upper() - ) - - response = self.dnac._exec( - family="software_image_management_swim", - function='get_golden_tag_status_of_an_image', - op_modifies=True, - params=image_params - ) - self.log("Received API response from 'get_golden_tag_status_of_an_image': {0}".format(str(response)), "DEBUG") - - response = response.get('response') - if response: - image_status = response['taggedGolden'] - if image_status and image_status == tag_image_golden: - self.status = "success" - self.result['changed'] = False - self.msg = "SWIM Image '{0}' already tagged as Golden image in Cisco Catalyst Center".format(image_name) - self.result['msg'] = self.msg - self.result['response'] = self.msg - self.log(self.msg, "INFO") - return self - - if not image_status and image_status == tag_image_golden: - self.status = "success" - self.result['changed'] = False - self.msg = "SWIM Image '{0}' already un-tagged from Golden image in Cisco Catalyst Center".format(image_name) - self.result['response'] = self.msg - self.result['msg'] = self.msg - self.log(self.msg, "INFO") - return self + device_role=tagging_details.get("device_role", "ALL") + + # status = "true" + # for role in device_role: + # image_params = dict( + # image_id=self.have.get("tagging_image_id"), + # site_id=self.have.get("site_id"), + # device_family_identifier=self.have.get("device_family_identifier"), + # device_role=role.upper() + # ) + + # response = self.dnac._exec( + # family="software_image_management_swim", + # function='get_golden_tag_status_of_an_image', + # op_modifies=True, + # params=image_params + # ) + # self.log("Received API response from 'get_golden_tag_status_of_an_image': {0}".format(str(response)), "DEBUG") + + # response = response.get('response') + # if response: + # image_status = response['taggedGolden'] + # if image_status and image_status == tag_image_golden: + # self.status = "success" + # self.msg = "SWIM Image '{0}' already tagged as Golden image in Cisco Catalyst Center for the device role - {1}".format(image_name,role) + # self.log(self.msg, "INFO") + # else: + # status = "false" + # if not image_status and image_status == tag_image_golden: + # self.status = "success" + # self.msg = "SWIM Image '{0}' already un-tagged from Golden image in Cisco Catalyst Cente for the device role - {1}".format(image_name,role) + # self.log(self.msg, "INFO") + + # if status == "true": + # self.status = "success" + # self.result['changed'] = False + # self.msg = "SWIM Image '{0}' already tagged as Golden image in Cisco Catalyst Center for all the roles".format(image_name) + # self.result['msg'] = self.msg + # self.result['response'] = self.msg + # self.log(self.msg, "INFO") + # return self if tag_image_golden: image_params = dict( - 
imageId=self.have.get("tagging_image_id"), - siteId=self.have.get("site_id"), - deviceFamilyIdentifier=self.have.get("device_family_identifier"), - deviceRole=tagging_details.get("device_role", "ALL").upper() + site_id=self.have.get("site_id"), + device_family_identifier=self.have.get("device_family_identifier"), + device_role="ALL", + image_id=self.have.get("tagging_image_id"), ) - self.log("Parameters for tagging the image as golden: {0}".format(str(image_params)), "INFO") + self.log("Parameters for un-tagging the image as golden: {0}".format(str(image_params)), "INFO") response = self.dnac._exec( family="software_image_management_swim", - function='tag_as_golden_image', + function='remove_golden_tag_for_image', op_modifies=True, params=image_params ) - self.log("Received API response from 'tag_as_golden_image': {0}".format(str(response)), "DEBUG") + self.log("Received API response from 'remove_golden_tag_for_image': {0}".format(str(response)), "DEBUG") + if not response: + self.status = "failed" + self.msg = "Did not get the response of API so cannot check the Golden tagging status of image - {0}".format(image_name) + + device_role=tagging_details.get("device_role", "ALL") + + for role in device_role: + image_params = dict( + imageId=self.have.get("tagging_image_id"), + siteId=self.have.get("site_id"), + deviceFamilyIdentifier=self.have.get("device_family_identifier"), + deviceRole=role.upper() + ) + self.log(f"Parameters for tagging the image as golden for role {role}: {str(image_params)}", "INFO") + + response = self.dnac._exec( + family="software_image_management_swim", + function='tag_as_golden_image', + op_modifies=True, + params=image_params + ) + self.log(f"Received API response from 'tag_as_golden_image' for role {role}: {str(response)}", "DEBUG") else: self.log("Parameters for un-tagging the image as golden: {0}".format(str(image_params)), "INFO") @@ -1596,13 +1623,22 @@ def get_diff_tagging(self): task_details = {} task_id = response.get("response").get("taskId") + device_role=tagging_details.get("device_role", "ALL") + device_role = ', '.join(device_role) + site_name = tagging_details.get("site_name") + + if not site_name: + site_name = "Global" + else: + site_name = tagging_details.get("site_name") + while True: task_details = self.get_task_details(task_id) if not task_details.get("isError") and 'successful' in task_details.get("progress"): self.status = "success" self.result['changed'] = True - self.msg = task_details.get("progress") + self.msg = "Tagging image {0} golden for site {1} for family Cisco Catalyst 9300 Switch for device deviceTag - {2} successful".format(image_name, site_name, device_role) self.result['msg'] = self.msg self.result['response'] = self.msg self.log(self.msg, "INFO") @@ -2089,41 +2125,44 @@ def verify_diff_tagged(self): tag_image_golden = tagging_details.get("tagging") image_id = self.have.get("tagging_image_id") image_name = self.get_image_name_from_id(image_id) + device_role = tagging_details.get("device_role", "ALL") - image_params = dict( - image_id=self.have.get("tagging_image_id"), - site_id=self.have.get("site_id"), - device_family_identifier=self.have.get("device_family_identifier"), - device_role=tagging_details.get("device_role", "ALL").upper() - ) - self.log("Parameters for checking the status of image: {0}".format(str(image_params)), "INFO") + for role in device_role: + image_params = dict( + image_id=self.have.get("tagging_image_id"), + site_id=self.have.get("site_id"), + 
device_family_identifier=self.have.get("device_family_identifier"), + device_role=role.upper() # Ensure role is uppercase + ) + self.log(f"Parameters for checking the status of image: {str(image_params)}", "INFO") - response = self.dnac._exec( - family="software_image_management_swim", - function='get_golden_tag_status_of_an_image', - op_modifies=True, - params=image_params - ) - self.log("Received API response from 'get_golden_tag_status_of_an_image': {0}".format(str(response)), "DEBUG") - - response = response.get('response') - if response: - image_status = response['taggedGolden'] - if image_status == tag_image_golden: - if tag_image_golden: - self.msg = """The requested image '{0}' has been tagged as golden in the Cisco Catalyst Center and - its status has been successfully verified.""".format(image_name) - self.log(self.msg, "INFO") - else: - self.msg = """The requested image '{0}' has been un-tagged as golden in the Cisco Catalyst Center and - image status has been verified.""".format(image_name) - self.log(self.msg, "INFO") - else: - self.log("""Mismatch between the playbook input for tagging/un-tagging image as golden and the Cisco Catalyst Center indicates that - the tagging/un-tagging task was not executed successfully.""", "INFO") + response = self.dnac._exec( + family="software_image_management_swim", + function='get_golden_tag_status_of_an_image', + op_modifies=True, + params=image_params + ) + self.log(f"Received API response from 'get_golden_tag_status_of_an_image': {str(response)}", "DEBUG") + + response = response.get('response') + if response: + image_status = response.get('taggedGolden') + if image_status == tag_image_golden: + if tag_image_golden: + self.msg = f"""The requested image '{image_name}' has been tagged as golden in the Cisco Catalyst Center and + its status has been successfully verified.""" + self.log(self.msg, "INFO") + else: + self.msg = f"""The requested image '{image_name}' has been un-tagged as golden in the Cisco Catalyst Center and + image status has been verified.""" + self.log(self.msg, "INFO") + else: + self.log("""Mismatch between the playbook input for tagging/un-tagging image as golden and the Cisco Catalyst Center indicates that + the tagging/un-tagging task was not executed successfully.""", "INFO") return self + def verify_diff_distributed(self): """ Verify the distribution status of a software image in Cisco Catalyst Center. 
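
The tagging rework in this patch (continued and enabled in the next patch) handles the requested device roles one at a time: for each role it first reads the current golden-tag status and only issues the tag or untag call when the desired state differs, which keeps the operation idempotent per role. The sketch below illustrates that per-role loop directly against the same SDK endpoints the module invokes ('get_golden_tag_status_of_an_image' and 'tag_as_golden_image'). It is a minimal illustration only, not the module's implementation: the controller address, credentials, image ID, site ID and device-family identifier are placeholder assumptions, not values taken from this patch.

    # Minimal sketch of the per-role golden-tagging loop; all identifiers below are hypothetical.
    from dnacentersdk import DNACenterAPI

    api = DNACenterAPI(base_url="https://dnac.example.com",
                       username="admin", password="password", verify=False)

    image_id = "image-uuid"            # hypothetical SWIM image UUID
    site_id = "site-uuid"              # hypothetical site UUID
    family_identifier = "family-id"    # hypothetical device family identifier
    device_roles = ["DISTRIBUTION", "ACCESS"]

    for role in device_roles:
        # Read the current golden-tag status for this image/site/family/role combination.
        current = api.software_image_management_swim.get_golden_tag_status_of_an_image(
            image_id=image_id,
            site_id=site_id,
            device_family_identifier=family_identifier,
            device_role=role,
        )
        if current.get("response", {}).get("taggedGolden"):
            # Already golden for this role; skip the write to stay idempotent.
            print("{0}: already tagged as golden".format(role))
            continue

        # Tag the image as golden for this role only.
        task = api.software_image_management_swim.tag_as_golden_image(
            imageId=image_id,
            siteId=site_id,
            deviceFamilyIdentifier=family_identifier,
            deviceRole=role,
        )
        print("{0}: task {1}".format(role, task.get("response", {}).get("taskId")))

The status check before each write is what the commented-out block in this patch sketches and the following patch enables; it is also what allows the module to report "already tagged as Golden image ... for all the roles" without re-issuing API calls.
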
From dccadefc15109e4f48661c75fc3fade477a772f0 Mon Sep 17 00:00:00 2001 From: Syed-khadeerahmed Date: Fri, 25 Oct 2024 14:21:29 +0530 Subject: [PATCH 03/83] coding in progress --- plugins/modules/swim_workflow_manager.py | 78 ++++++++++++------------ 1 file changed, 39 insertions(+), 39 deletions(-) diff --git a/plugins/modules/swim_workflow_manager.py b/plugins/modules/swim_workflow_manager.py index 849e25190..d040a1b00 100644 --- a/plugins/modules/swim_workflow_manager.py +++ b/plugins/modules/swim_workflow_manager.py @@ -1523,45 +1523,45 @@ def get_diff_tagging(self): image_name = self.get_image_name_from_id(self.have.get("tagging_image_id")) device_role=tagging_details.get("device_role", "ALL") - # status = "true" - # for role in device_role: - # image_params = dict( - # image_id=self.have.get("tagging_image_id"), - # site_id=self.have.get("site_id"), - # device_family_identifier=self.have.get("device_family_identifier"), - # device_role=role.upper() - # ) - - # response = self.dnac._exec( - # family="software_image_management_swim", - # function='get_golden_tag_status_of_an_image', - # op_modifies=True, - # params=image_params - # ) - # self.log("Received API response from 'get_golden_tag_status_of_an_image': {0}".format(str(response)), "DEBUG") - - # response = response.get('response') - # if response: - # image_status = response['taggedGolden'] - # if image_status and image_status == tag_image_golden: - # self.status = "success" - # self.msg = "SWIM Image '{0}' already tagged as Golden image in Cisco Catalyst Center for the device role - {1}".format(image_name,role) - # self.log(self.msg, "INFO") - # else: - # status = "false" - # if not image_status and image_status == tag_image_golden: - # self.status = "success" - # self.msg = "SWIM Image '{0}' already un-tagged from Golden image in Cisco Catalyst Cente for the device role - {1}".format(image_name,role) - # self.log(self.msg, "INFO") - - # if status == "true": - # self.status = "success" - # self.result['changed'] = False - # self.msg = "SWIM Image '{0}' already tagged as Golden image in Cisco Catalyst Center for all the roles".format(image_name) - # self.result['msg'] = self.msg - # self.result['response'] = self.msg - # self.log(self.msg, "INFO") - # return self + status = "true" + for role in device_role: + image_params = dict( + image_id=self.have.get("tagging_image_id"), + site_id=self.have.get("site_id"), + device_family_identifier=self.have.get("device_family_identifier"), + device_role=role.upper() + ) + + response = self.dnac._exec( + family="software_image_management_swim", + function='get_golden_tag_status_of_an_image', + op_modifies=True, + params=image_params + ) + self.log("Received API response from 'get_golden_tag_status_of_an_image': {0}".format(str(response)), "DEBUG") + + response = response.get('response') + if response: + image_status = response['taggedGolden'] + if image_status and image_status == tag_image_golden: + self.status = "success" + self.msg = "SWIM Image '{0}' already tagged as Golden image in Cisco Catalyst Center for the device role - {1}".format(image_name,role) + self.log(self.msg, "INFO") + else: + status = "false" + if not image_status and image_status == tag_image_golden: + self.status = "success" + self.msg = "SWIM Image '{0}' already un-tagged from Golden image in Cisco Catalyst Cente for the device role - {1}".format(image_name,role) + self.log(self.msg, "INFO") + + if status == "true": + self.status = "success" + self.result['changed'] = False + self.msg = "SWIM Image '{0}' already 
tagged as Golden image in Cisco Catalyst Center for all the roles".format(image_name) + self.result['msg'] = self.msg + self.result['response'] = self.msg + self.log(self.msg, "INFO") + return self if tag_image_golden: image_params = dict( From 31fad33545a96de5ae9210117bda25535ab8125e Mon Sep 17 00:00:00 2001 From: md-rafeek Date: Sun, 27 Oct 2024 23:38:23 +0530 Subject: [PATCH 04/83] One bug and enhancement bulk update AP completed --- plugins/module_utils/dnac.py | 7 +- .../modules/accesspoint_workflow_manager.py | 548 +++++++++++++++++- 2 files changed, 550 insertions(+), 5 deletions(-) diff --git a/plugins/module_utils/dnac.py b/plugins/module_utils/dnac.py index 90e2e438b..50848c838 100644 --- a/plugins/module_utils/dnac.py +++ b/plugins/module_utils/dnac.py @@ -856,7 +856,7 @@ def get_site_id(self, site_name): return (site_exists, site_id) - def assign_device_to_site(self, device_ids, site_name, site_id): + def assign_device_to_site(self, device_ids, site_name, site_id, site_type=None): """ Assign devices to the specified site. Args: @@ -870,6 +870,11 @@ def assign_device_to_site(self, device_ids, site_name, site_id): Assigns the specified devices to the site. If the assignment is successful, returns True. Otherwise, logs an error and returns False along with error details. """ + if site_type not in ("building", "floor"): + self.msg = "Device(s) can only be assigned to building/floor" + self.log(self.msg, "ERROR") + self.status = "failed" + self.module.fail_json(msg=self.msg) if self.get_ccc_version_as_integer() <= self.get_ccc_version_as_int_from_str("2.3.5.3"): try: diff --git a/plugins/modules/accesspoint_workflow_manager.py b/plugins/modules/accesspoint_workflow_manager.py index 60c614851..a9e1e63de 100644 --- a/plugins/modules/accesspoint_workflow_manager.py +++ b/plugins/modules/accesspoint_workflow_manager.py @@ -570,6 +570,458 @@ type: list elements: str required: false + bulk_update_aps: + description: | + Bulk update to modify one or more access points (APs) of the same series, + identified by their MAC address, hostname, or management IP address. + At least one of the following parameters is required: + - mac_address + - hostname + - management_ip_address + type: dict + required: false + suboptions: + ap_identifier: + description: | + AP identifier is a list of dict which contains MAC address, hostname, or management IP address + which is used to identify the access points for bulk updated with AP Name to update access point. + type: list + elements: str + required: True + suboptions: + mac_address: + description: | + The MAC address used to identify the device. If provided, it cannot be modified. + To identify the specific access point, at least one of the following parameters is required. + - mac_address + - hostname + - management_ip_address + type: str + required: True + hostname: + description: | + The Host Name used to identify the device. If provided, it cannot be modified. + To identify the specific access point, at least one of the following parameters is required. + - mac_address + - hostname + - management_ip_address + type: str + required: True + management_ip_address: + description: | + The Management IP Address used to identify the device. If provided, it cannot be modified. + To identify the specific access point, at least one of the following parameters is required. + - mac_address + - hostname + - management_ip_address + type: str + required: True + ap_name: + description: Current AP name that needs to be changed along with the new AP name. 
For example, "Test2". + type: str + required: False + common_fields_to_change: + description: | + Common fields to change AP is a dict which contains below data which need to update all listed access points. + type: dict + elements: str + required: True + suboptions: + admin_status: + description: Status of the AP configuration. Accepts "Enabled" or "Disabled". For example, "Enabled". + type: str + required: False + led_status: + description: State of the AP's LED. Accepts "Enabled" or "Disabled". For example, "Enabled". + type: str + required: False + led_brightness_level: + description: Brightness level of the AP's LED. Accepts values from 1 to 8. For example, 3. + type: int + required: False + ap_mode: + description: | + Defines the mode of operation for the Access Point (AP). Possible values include "Local", + "Monitor", "Sniffer", or "Bridge". For example, "Local". + type: str + required: False + location: + description: Location name of the AP. Provide this data if a change is required. For example, "Bangalore". + type: str + required: False + is_assigned_site_as_location: + description: | + Configures whether the access point location is automatically set to the site assigned to the access point. + Accepts "Enabled" or "Disabled". If set to "Enabled", no additional location configuration is required. + type: str + required: False + failover_priority: + description: Priority order for failover in AP configuration. Accepts "Low", "Medium", "High", or "Critical". + type: str + required: False + clean_air_si_2.4ghz: + description: | + Clean Air Spectrum Intelligence (SI) feature status for the 2.4GHz band. Indicates whether. For example, "Enabled". + Clean Air Spectrum Intelligence is enabled or disabled. + type: str + required: False + clean_air_si_5ghz: + description: | + Clean Air Spectrum Intelligence (SI) feature status for the 5GHz band. Indicates whether. For example, "Enabled". + Clean Air Spectrum Intelligence is enabled or disabled. + type: str + required: False + clean_air_si_6ghz: + description: | + Clean Air Spectrum Intelligence (SI) feature status for the 6GHz band. Indicates whether. For example, "Enabled". + Clean Air Spectrum Intelligence is enabled or disabled. + type: str + required: False + primary_controller_name: + description: | + Name or identifier of the primary wireless LAN controller (WLC) managing the Access Point (AP). + For example, "SJ-EWLC-1". + type: str + required: False + primary_ip_address: + description: IP address of the primary wireless LAN controller (WLC) managing the Access Point (AP). + type: dict + required: False + suboptions: + address: + description: IP address of the primary wireless LAN controller. For example, "10.0.0.3". + type: str + required: False + secondary_controller_name: + description: | + Name or identifier of the secondary wireless LAN controller (WLC) managing the Access Point (AP). + To modify only the primary controller, set the secondary and tertiary controller names + to "Inherit from site / Clear". + type: str + required: False + secondary_ip_address: + description: IP address of the secondary wireless LAN controller (WLC) managing the Access Point (AP). + type: dict + required: False + suboptions: + address: + description: IP address of the primary wireless LAN controller. For example, "10.0.0.3". + type: str + required: False + tertiary_controller_name: + description: | + Name or identifier of the tertiary wireless LAN controller (WLC) managing the Access Point (AP). 
+ To modify only the primary controller, set the secondary and tertiary controller names + to "Inherit from site / Clear". + type: str + required: False + tertiary_ip_address: + description: IP address of the tertiary wireless LAN controller (WLC) managing the Access Point (AP). + type: dict + required: False + suboptions: + address: + description: IP address of the primary wireless LAN controller. For example, "10.0.0.2". + type: str + required: False + 2.4ghz_radio: + description: Configuration options for the 2.4GHz radio interface. + type: dict + required: False + suboptions: + admin_status: + description: Administrative status for the 2.4GHz radio interface. For example, "Enabled". + type: str + required: False + antenna_name: + description: Name or type of antenna used for the 2.4GHz radio interface. For example, "other". + type: str + required: False + antenna_gain: + description: | + Specifies the antenna gain value in decibels (dB) for the 2.4GHz radio interface, valid values range + from 0 to 40. For example, 10. + type: int + required: False + radio_role_assignment: + description: Role assignment mode for the 2.4GHz radio interface. Accepts "Auto", "Client-serving", or "Monitor". For example, Auto. + type: str + required: False + cable_loss: + description: | + Cable loss in dB for the 2.4GHz radio interface. Valid values are from 0 to 40. + This value must be less than the antenna gain. For example, 2. + type: int + required: False + antenna_cable_name: + description: Name or type of antenna cable used for the 2.4GHz radio interface. For example, "other". + type: str + required: False + channel_assignment_mode: + description: Mode of channel assignment for the 2.4GHz radio interface. Accepts "Global" or "Custom". For example, "Custom". + type: str + required: False + channel_number: + description: Custom channel number configured for the 2.4GHz radio interface. For example, 6. + type: int + required: False + power_assignment_mode: + description: Mode of power assignment for the 2.4GHz radio interface. Accepts "Global" or "Custom". For example, "Custom". + type: str + required: False + power_level: + description: Custom power level configured for the 2.4GHz radio interface. For example, 3. + type: int + required: False + 5ghz_radio: + description: Configuration options for the 5GHz radio interface. + type: dict + required: False + suboptions: + admin_status: + description: Administrative status for the 5GHz radio interface. For example, "Enabled". + type: str + required: False + antenna_name: + description: Name or type of antenna used for the 5GHz radio interface. For example, "other". + type: str + required: False + antenna_gain: + description: | + Antenna gain value in decibels (dB) for the 5GHz radio interface, valid values range + from 0 to 40. For example, 5. + type: int + required: False + radio_role_assignment: + description: | + Role assignment mode for the 5GHz radio interface. Accepts "Auto", "Client-serving", + or "Monitor". For example, "Auto". This field not required for xor series access point slot 1 + type: str + required: False + cable_loss: + description: | + Cable loss in dB for the 5GHz radio interface. Valid values are from 0 to 40. + This value must be less than the antenna gain. For example, 3. + type: int + required: False + antenna_cable_name: + description: Name or type of antenna cable used for the 5GHz radio interface. For example, "other". 
+ type: str + required: False + channel_assignment_mode: + description: Mode of channel assignment for the 5GHz radio interface. Accepts "Global" or "Custom". For example, "Custom". + type: str + required: False + channel_number: + description: Custom channel number configured for the 5GHz radio interface. For example, 36. + type: int + required: False + power_assignment_mode: + description: Mode of power assignment for the 5GHz radio interface. Accepts "Global" or "Custom". For example, "Custom". + type: str + required: False + power_level: + description: Custom power level configured for the 5GHz radio interface. For example, 3. + type: int + required: False + 6ghz_radio: + description: Configuration options for the 6GHz radio interface. + type: dict + required: False + suboptions: + admin_status: + description: Administrative status for the 6GHz radio interface. For example, "Enabled". + type: str + required: False + antenna_name: + description: Name or type of antenna used for the 6GHz radio interface. For example, "other". + type: str + required: False + antenna_gain: + description: | + Antenna gain value in decibels (dB) for the 6GHz radio interface, valid values range + from 0 to 40. For example, 30. + type: int + required: False + radio_role_assignment: + description: Role assignment mode for the 6GHz radio interface. Accepts "Auto", "Client-serving", or "Monitor". + type: str + required: False + cable_loss: + description: | + Cable loss in dB for the 6GHz radio interface. Valid values are from 0 to 40. + This value must be less than the antenna gain. For example, 10. + type: int + required: False + antenna_cable_name: + description: Name or type of antenna cable used for the 6GHz radio interface. For example, "other". + type: str + required: False + channel_assignment_mode: + description: Mode of channel assignment for the 6GHz radio interface. Accepts "Global" or "Custom". For example, "Custom". + type: str + required: False + channel_number: + description: Custom channel number configured for the 6GHz radio interface. For example, 6. + type: int + required: False + power_assignment_mode: + description: Mode of power assignment for the 6GHz radio interface. Accepts "Global" or "Custom". For example, "Custom". + type: str + required: False + power_level: + description: Custom power level configured for the 6GHz radio interface. For example, 3. + type: int + required: False + xor_radio: + description: Configuration options for the XOR radio interface. + type: dict + required: False + suboptions: + admin_status: + description: Administrative status for the XOR radio interface. For example, "Enabled". + type: str + required: False + antenna_name: + description: Name or type of antenna used for the XOR radio interface. For example, "other". + type: str + required: False + antenna_gain: + description: | + Antenna gain value in decibels (dB) for the XOR radio interface, valid values range + from 0 to 40. For example, 14. + type: int + required: False + radio_role_assignment: + description: | + Role assignment mode for the XOR radio interface. Accepts "Auto", "Client-serving", or "Monitor" + If "radio_role_assignment" is set to "Client-serving" only the power level and channel number can be changed. + Additionally, if the 5 GHz band is selected in the radio band, the power level cannot be modified. + For example, "Auto". + type: str + required: False + radio_band: + description: | + Radio band should be enabled if the radio role assignment is set to "Client-serving" mode. 
+ Accepts "2.4 GHz" or "5 GHz" or "6 GHz". + type: str + required: False + cable_loss: + description: | + Cable loss in dB for the XOR radio interface. Valid values are from 0 to 40. + This value must be less than the antenna gain. For example, 5. + type: int + required: False + antenna_cable_name: + description: Name or type of antenna cable used for the XOR radio interface. For example, "other". + type: str + required: False + channel_assignment_mode: + description: | + Mode of channel assignment for the XOR radio interface. Accepts "Global" or "Custom". + - For "Custom" mode and a radio band of "2.4 GHz", valid values are from 1 to 14. + - For "Custom" mode and a radio band of "5 GHz", valid values are + 36, 40, 44, 48, 52, 56, 60, 64, 100, 104, 108, + 112, 116, 120, 124, 128, 132, 136, 140, 144, + 149, 153, 157, 161, 165, 169, 173. + - For "Custom" mode and a radio band of "6 GHz", valid values are + 1, 5, 9, 13, 17, 21, 25, 29, 33, 37, 41, 45, 49, + 53, 57, 61, 65, 69, 73, 77, 81, 85, 89, 93, 97, + 101, 105, 109, 113, 117, 121, 125, 129, 133, 137, + 141, 145, 149, 153, 157, 161, 165, 169, 173, 177, + 181, 185, 189, 193, 197, 201, 205, 209, 213, 217, + 221, 225, 229, 233. + For example, "Custom". + type: str + required: False + channel_number: + description: Custom channel number configured for the XOR radio interface. For example, 6. + type: int + required: False + channel_width: + description: | + Width of the channel configured for the XOR radio interface. Accepts values + "20 MHz", "40 MHz", "80 MHz", "160 MHz" or "320 MHz". For example, 20 MHz. + type: str + required: False + power_assignment_mode: + description: | + Mode of power assignment for the XOR radio interface. Accepts "Global" or "Custom." + In "Custom" mode, valid values range from 1 to 8. + type: str + required: False + power_level: + description: Custom power level configured for the XOR radio interface. For example, 3. + type: int + required: False + tri_radio: + description: Configuration options for the TRI radio interface. + type: dict + required: False + suboptions: + admin_status: + description: Administrative status for the TRI radio interface. For example, "Enabled". + type: str + required: False + antenna_name: + description: Name or type of antenna used for the TRI radio interface. For example, "other". + type: str + required: False + antenna_gain: + description: | + Antenna gain value in decibels (dB) for the TRI radio interface, valid values range + from 0 to 40. For example, 16. + type: int + required: False + radio_role_assignment: + description: | + Role assignment mode for the TRI radio interface. Accepts "Auto", "Client-serving", or "Monitor". + If radio_role_assignment is "client-serving", then only power-level and channel-level can be changed. + type: str + required: False + cable_loss: + description: | + Cable loss in dB for the TRI radio interface. Valid values are from 0 to 40. + This value must be less than the antenna gain. For example, 6. + type: int + required: False + antenna_cable_name: + description: Name or type of antenna cable used for the TRI radio interface. For example, "other". + type: str + required: False + channel_assignment_mode: + description: | + Mode of channel assignment for the TRI radio interface. Accepts "Global" or "Custom". + For Custom, it accepts values like 36, 40, 44, 48, 52, 56, 60, 64, 100, 104, 108, 112, 116, 120, 124, 128, + 132, 136, 140, 144, 149, 153, 157, 161, 165, 169, 173. (eg. 
Custom) + type: str + required: False + channel_number: + description: Custom channel number configured for the TRI radio interface. For example, 6. + type: int + required: False + channel_width: + description: | + Width of the channel configured for the TRI radio interface. Accepts values + "20 MHz", "40 MHz", "80 MHz", "160 MHz", or "320 MHz". . For example, 20 MHz. + type: str + required: False + power_assignment_mode: + description: | + Mode of power assignment for the TRI radio interface. Accepts "Global" or "Custom". + In Custom, it accepts values 1 to 8. + type: str + required: False + power_level: + description: Custom power level configured for the TRI radio interface. For example, 3. + type: int + required: False + dual_radio_mode: + description: | + Mode of operation configured for the TRI radio interface. Specifies how the + access point (AP) manages its dual radio functionality. eg . Auto + type: str + required: False requirements: - dnacentersdk >= 2.7.2 @@ -1045,6 +1497,42 @@ - "6c:d6:e3:75:5a:e0" - "e4:38:7e:42:bc:00" register: output_list + + - name: Bulk update Access Point Configurations + cisco.dnac.accesspoint_workflow_manager: + dnac_host: "{{ dnac_host }}" + dnac_username: "{{ dnac_username }}" + dnac_password: "{{ dnac_password }}" + dnac_verify: "{{ dnac_verify }}" + dnac_port: "{{ dnac_port }}" + dnac_version: "{{ dnac_version }}" + dnac_debug: "{{ dnac_debug }}" + dnac_log: True + dnac_log_level: DEBUG + config_verify: True + state: merged + config: + - bulk_update_aps: + ap_identifier: + - mac_address: e4:38:7e:42:bc:40 + ap_name: "Cisco_9166_T2" + - mac_address: a4:88:73:d0:53:60 + ap_name: "Cisco_9120_T2" + common_fields_to_change: + admin_status: "Disabled" + led_status: "Enabled" + led_brightness_level: 1 + ap_mode: "Local" + is_assigned_site_as_location: "Enabled" + failover_priority: "Low" + clean_air_si_2.4ghz: "Enabled" + clean_air_si_5ghz: "Enabled" + clean_air_si_6ghz: "Disabled" + primary_controller_name: "SJ-EWLC-1" + primary_ip_address: + address: "204.192.4.200" + secondary_controller_name: "Inherit from site / Clear" + tertiary_controller_name: "Inherit from site / Clear" """ RETURN = r""" @@ -1113,6 +1601,52 @@ } } } + +#Case-4: Bulk update for single or multiple Accesspoint +response_3: + description: > + A dictionary with bulk update status of AP returned by the Catalyst Center Python SDK + returned: always + type: dict + sample: | + { + "response": { + "accesspoints_updates": { + "ap_config_update_status": ""List of AP Configuration ['Cisco_9120_T2', 'Cisco_9166_T2'] updated Successfully"", + "ap_update_config_task_details": { + "data": "workflow_id=76da6b66-ebf8-4697-bf78-5bd1b45b6367;cfs_id=2fca9272-0209-4fd8-9c54-4e1e690b39ff,a6595513-d395-4a99-bd53-a42b722e9aee; + rollback_status=not_supported;rollback_taskid=0;failure_task=NA;processcfs_complete=true", + "progress": "TASK_INTENT" + } + } + } + } + +#Case-5: Factory reset for single or multiple Accesspoint +response_3: + description: > + A dictionary with factory reset status of AP returned by the Catalyst Center Python SDK + returned: always + type: dict + sample: | + { + "response": { + "accesspoints_updates": { + "ap_reset_status": "APs ['a4:88:73:ce:0a:6c'] reset successfully", + "ap_reset_task_details": { + "reset_api_response": [ + { + "apFactoryResetStatus": "Success", + "apName": "Cisco_9120_T1", + "ethernetMacAddress": "a4:88:73:ce:0a:6c", + "failureReason": null, + "radioMacAddress": "a4:88:73:d0:53:60" + } + ] + } + } + } + } """ @@ -2458,7 +2992,7 @@ def 
access_point_provision_old(self, rf_profile, hostname, type_name, site_name_ return response return None - def access_point_provision_new(self, rf_profile, device_id, site_id): + def access_point_provision_new(self, rf_profile, device_id, site_id, site_type): """ Provisions a device (AP) to a specific site. support Cisco Catalyst Center version 2.3.7.6 and greater @@ -2492,7 +3026,7 @@ def access_point_provision_new(self, rf_profile, device_id, site_id): try: site_assign_status = self.assign_device_to_site([self.have.get("device_id")], self.have.get("site_name_hierarchy"), - self.have.get("site_id")) + self.have.get("site_id"), site_type) if site_assign_status: self.log('Current device details: {0}'.format(self.pprint(provision_params)), "INFO") response = self.dnac._exec( @@ -2564,7 +3098,7 @@ def provision_device(self): time.sleep(resync_retry_interval) resync_retry_count = resync_retry_count - 1 else: - response = self.access_point_provision_new(rf_profile, device_id, site_id) + response = self.access_point_provision_new(rf_profile, device_id, site_id, type_name) if response and isinstance(response, dict): task_id = response.get("response", {}).get("taskId") resync_retry_count = int(self.payload.get("dnac_api_task_timeout")) @@ -3378,6 +3912,10 @@ def reboot_factory_reset_function(self, ap_list, reboot_or_reset): dict: A dictionary containing the result of the access point reset/reboot status. """ ap_indentity = list(ap_list.keys())[0] + if ap_indentity and len(ap_list.get(ap_indentity)) > 100: + error_msg = "Maximum allowed AP list 100 but passed {0}".format(str(len(ap_list.get(ap_indentity)))) + self.log(error_msg, "ERROR") + self.module.fail_json(msg=error_msg) if ap_indentity and ap_indentity in self.keymap and len(ap_list.get(ap_indentity)) > 0: eth_mac_list = [] @@ -3413,6 +3951,7 @@ def bulk_ap_update(self, bulk_config): self.payload["access_point_details"] = ap_details ap_update_list = [] common_config = {} + ap_output_list = [] if ap_exist and len(ap_details) > 0: for each_ap in ap_details: ap_config_exists, ap_configuration = self.get_accesspoint_config(each_ap["ap_ethernet_mac_address"]) @@ -3425,6 +3964,7 @@ def bulk_ap_update(self, bulk_config): each_ap["hostname"] == ap.get('hostname') or each_ap["management_ip_address"] == ap.get('management_ip_address'))] self.want["ap_name"] = ap_name[0] + ap_output_list.append(ap_name[0]) self.log("Access point WANT configuration exists: {0}, Current configuration: {1}" .format(ap_config_exists, self.pprint(self.want)), "INFO") consolidated_config = self.config_diff(ap_configuration) @@ -3469,7 +4009,7 @@ def bulk_ap_update(self, bulk_config): self.result["ap_update_status"] = True self.log("Task Details: {0} .".format(self.pprint( task_details_response)), "INFO") - self.msg = "AP Configuration - {0} updated Successfully" + self.msg = "List of AP Configuration {0} updated Successfully".format(str(ap_output_list)) self.log(self.msg, "INFO") responses["accesspoints_updates"] = { "ap_update_config_task_details": self.get_task_details_by_id(task_details_response["id"]), From ba3db2f789c640cd5088cf557aa73640f7a375fe Mon Sep 17 00:00:00 2001 From: md-rafeek Date: Mon, 28 Oct 2024 10:02:33 +0530 Subject: [PATCH 05/83] Changes in assign device to site in danc.py --- plugins/module_utils/dnac.py | 15 +++++++++------ plugins/modules/accesspoint_workflow_manager.py | 9 ++++----- 2 files changed, 13 insertions(+), 11 deletions(-) diff --git a/plugins/module_utils/dnac.py b/plugins/module_utils/dnac.py index 50848c838..68b08c236 100644 --- 
a/plugins/module_utils/dnac.py +++ b/plugins/module_utils/dnac.py @@ -856,7 +856,7 @@ def get_site_id(self, site_name): return (site_exists, site_id) - def assign_device_to_site(self, device_ids, site_name, site_id, site_type=None): + def assign_device_to_site(self, device_ids, site_name, site_id): """ Assign devices to the specified site. Args: @@ -870,11 +870,14 @@ def assign_device_to_site(self, device_ids, site_name, site_id, site_type=None): Assigns the specified devices to the site. If the assignment is successful, returns True. Otherwise, logs an error and returns False along with error details. """ - if site_type not in ("building", "floor"): - self.msg = "Device(s) can only be assigned to building/floor" - self.log(self.msg, "ERROR") - self.status = "failed" - self.module.fail_json(msg=self.msg) + site_response = self.get_site(site_name) + if site_response.get("response") and site_response["response"][0].get("type"): + site_type = site_response["response"][0].get("type") + if site_type not in ("building", "floor"): + self.msg = "Device(s) can only be assigned to building/floor" + self.log(self.msg, "ERROR") + self.status = "failed" + self.module.fail_json(msg=self.msg) if self.get_ccc_version_as_integer() <= self.get_ccc_version_as_int_from_str("2.3.5.3"): try: diff --git a/plugins/modules/accesspoint_workflow_manager.py b/plugins/modules/accesspoint_workflow_manager.py index a9e1e63de..3cfb8ff95 100644 --- a/plugins/modules/accesspoint_workflow_manager.py +++ b/plugins/modules/accesspoint_workflow_manager.py @@ -3026,7 +3026,7 @@ def access_point_provision_new(self, rf_profile, device_id, site_id, site_type): try: site_assign_status = self.assign_device_to_site([self.have.get("device_id")], self.have.get("site_name_hierarchy"), - self.have.get("site_id"), site_type) + self.have.get("site_id")) if site_assign_status: self.log('Current device details: {0}'.format(self.pprint(provision_params)), "INFO") response = self.dnac._exec( @@ -3960,9 +3960,9 @@ def bulk_ap_update(self, bulk_config): self.want = bulk_config.get("common_fields_to_change") self.want["mac_address"] = each_ap["mac_address"] ap_name = [ap.get('ap_name') for ap in bulk_config.get("ap_identifier") - if (each_ap["mac_address"] == ap.get('mac_address') or - each_ap["hostname"] == ap.get('hostname') or - each_ap["management_ip_address"] == ap.get('management_ip_address'))] + if (each_ap["mac_address"] == ap.get('mac_address') or + each_ap["hostname"] == ap.get('hostname') or + each_ap["management_ip_address"] == ap.get('management_ip_address'))] self.want["ap_name"] = ap_name[0] ap_output_list.append(ap_name[0]) self.log("Access point WANT configuration exists: {0}, Current configuration: {1}" @@ -4081,7 +4081,6 @@ def main(): ccc_network.reboot_factory_reset_function(ap_list, reboot_reset) module.exit_json(**ccc_network.result) - bulk_updates = ccc_network.validated_config[0].get("bulk_update_aps") if bulk_updates is not None: ccc_network.log("Bulk List: {0}".format(ccc_network.pprint(bulk_updates)), "INFO") From a403b8752d562b3dd4c72f5194be2ffe0526d602 Mon Sep 17 00:00:00 2001 From: md-rafeek Date: Mon, 28 Oct 2024 14:50:27 +0530 Subject: [PATCH 06/83] Minor changes for the assign device to site --- plugins/modules/accesspoint_workflow_manager.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/plugins/modules/accesspoint_workflow_manager.py b/plugins/modules/accesspoint_workflow_manager.py index 3cfb8ff95..bf266749d 100644 --- a/plugins/modules/accesspoint_workflow_manager.py +++ 
b/plugins/modules/accesspoint_workflow_manager.py @@ -2992,7 +2992,7 @@ def access_point_provision_old(self, rf_profile, hostname, type_name, site_name_ return response return None - def access_point_provision_new(self, rf_profile, device_id, site_id, site_type): + def access_point_provision_new(self, rf_profile, device_id, site_id): """ Provisions a device (AP) to a specific site. support Cisco Catalyst Center version 2.3.7.6 and greater @@ -3098,7 +3098,7 @@ def provision_device(self): time.sleep(resync_retry_interval) resync_retry_count = resync_retry_count - 1 else: - response = self.access_point_provision_new(rf_profile, device_id, site_id, type_name) + response = self.access_point_provision_new(rf_profile, device_id, site_id) if response and isinstance(response, dict): task_id = response.get("response", {}).get("taskId") resync_retry_count = int(self.payload.get("dnac_api_task_timeout")) From ff126cbb80f96e0568c134e27c2452ae057bb09d Mon Sep 17 00:00:00 2001 From: md-rafeek Date: Mon, 28 Oct 2024 22:32:10 +0530 Subject: [PATCH 07/83] dnac.py changes updated in ap code --- plugins/modules/accesspoint_workflow_manager.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plugins/modules/accesspoint_workflow_manager.py b/plugins/modules/accesspoint_workflow_manager.py index bf266749d..cc3c2be1a 100644 --- a/plugins/modules/accesspoint_workflow_manager.py +++ b/plugins/modules/accesspoint_workflow_manager.py @@ -2886,7 +2886,7 @@ def get_site_device(self, site_id, ap_mac_address, site_exist=None, current_site """ try: site_name = self.have.get("site_name_hierarchy", self.want.get("site_name")) - device_list = self.get_device_ids_from_site(site_name, site_id) + api_response, device_list = self.get_device_ids_from_site(site_name, site_id) if current_config.get("id") is not None and current_config.get("id") in device_list: self.log("Device with MAC address: {0} found in site: {1} Proceeding with ap_site updation." 
.format(ap_mac_address, site_id), "INFO") From 83e03f87ce4450b4bbda3ba7837ec460d1059110 Mon Sep 17 00:00:00 2001 From: md-rafeek Date: Mon, 28 Oct 2024 22:47:09 +0530 Subject: [PATCH 08/83] sanity issue changes updated in ap code --- plugins/modules/accesspoint_workflow_manager.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/plugins/modules/accesspoint_workflow_manager.py b/plugins/modules/accesspoint_workflow_manager.py index cc3c2be1a..fe0044628 100644 --- a/plugins/modules/accesspoint_workflow_manager.py +++ b/plugins/modules/accesspoint_workflow_manager.py @@ -1603,7 +1603,7 @@ } #Case-4: Bulk update for single or multiple Accesspoint -response_3: +response_4: description: > A dictionary with bulk update status of AP returned by the Catalyst Center Python SDK returned: always @@ -1623,7 +1623,7 @@ } #Case-5: Factory reset for single or multiple Accesspoint -response_3: +response_5: description: > A dictionary with factory reset status of AP returned by the Catalyst Center Python SDK returned: always From 2442004b8729689a10bae93f4146f3383f3e1ed9 Mon Sep 17 00:00:00 2001 From: md-rafeek Date: Mon, 28 Oct 2024 22:53:53 +0530 Subject: [PATCH 09/83] sanity issue changes updated in ap code --- .../modules/accesspoint_workflow_manager.py | 402 ------------------ 1 file changed, 402 deletions(-) diff --git a/plugins/modules/accesspoint_workflow_manager.py b/plugins/modules/accesspoint_workflow_manager.py index fe0044628..0f603cc0c 100644 --- a/plugins/modules/accesspoint_workflow_manager.py +++ b/plugins/modules/accesspoint_workflow_manager.py @@ -620,408 +620,6 @@ description: Current AP name that needs to be changed along with the new AP name. For example, "Test2". type: str required: False - common_fields_to_change: - description: | - Common fields to change AP is a dict which contains below data which need to update all listed access points. - type: dict - elements: str - required: True - suboptions: - admin_status: - description: Status of the AP configuration. Accepts "Enabled" or "Disabled". For example, "Enabled". - type: str - required: False - led_status: - description: State of the AP's LED. Accepts "Enabled" or "Disabled". For example, "Enabled". - type: str - required: False - led_brightness_level: - description: Brightness level of the AP's LED. Accepts values from 1 to 8. For example, 3. - type: int - required: False - ap_mode: - description: | - Defines the mode of operation for the Access Point (AP). Possible values include "Local", - "Monitor", "Sniffer", or "Bridge". For example, "Local". - type: str - required: False - location: - description: Location name of the AP. Provide this data if a change is required. For example, "Bangalore". - type: str - required: False - is_assigned_site_as_location: - description: | - Configures whether the access point location is automatically set to the site assigned to the access point. - Accepts "Enabled" or "Disabled". If set to "Enabled", no additional location configuration is required. - type: str - required: False - failover_priority: - description: Priority order for failover in AP configuration. Accepts "Low", "Medium", "High", or "Critical". - type: str - required: False - clean_air_si_2.4ghz: - description: | - Clean Air Spectrum Intelligence (SI) feature status for the 2.4GHz band. Indicates whether. For example, "Enabled". - Clean Air Spectrum Intelligence is enabled or disabled. 
- type: str - required: False - clean_air_si_5ghz: - description: | - Clean Air Spectrum Intelligence (SI) feature status for the 5GHz band. Indicates whether. For example, "Enabled". - Clean Air Spectrum Intelligence is enabled or disabled. - type: str - required: False - clean_air_si_6ghz: - description: | - Clean Air Spectrum Intelligence (SI) feature status for the 6GHz band. Indicates whether. For example, "Enabled". - Clean Air Spectrum Intelligence is enabled or disabled. - type: str - required: False - primary_controller_name: - description: | - Name or identifier of the primary wireless LAN controller (WLC) managing the Access Point (AP). - For example, "SJ-EWLC-1". - type: str - required: False - primary_ip_address: - description: IP address of the primary wireless LAN controller (WLC) managing the Access Point (AP). - type: dict - required: False - suboptions: - address: - description: IP address of the primary wireless LAN controller. For example, "10.0.0.3". - type: str - required: False - secondary_controller_name: - description: | - Name or identifier of the secondary wireless LAN controller (WLC) managing the Access Point (AP). - To modify only the primary controller, set the secondary and tertiary controller names - to "Inherit from site / Clear". - type: str - required: False - secondary_ip_address: - description: IP address of the secondary wireless LAN controller (WLC) managing the Access Point (AP). - type: dict - required: False - suboptions: - address: - description: IP address of the primary wireless LAN controller. For example, "10.0.0.3". - type: str - required: False - tertiary_controller_name: - description: | - Name or identifier of the tertiary wireless LAN controller (WLC) managing the Access Point (AP). - To modify only the primary controller, set the secondary and tertiary controller names - to "Inherit from site / Clear". - type: str - required: False - tertiary_ip_address: - description: IP address of the tertiary wireless LAN controller (WLC) managing the Access Point (AP). - type: dict - required: False - suboptions: - address: - description: IP address of the primary wireless LAN controller. For example, "10.0.0.2". - type: str - required: False - 2.4ghz_radio: - description: Configuration options for the 2.4GHz radio interface. - type: dict - required: False - suboptions: - admin_status: - description: Administrative status for the 2.4GHz radio interface. For example, "Enabled". - type: str - required: False - antenna_name: - description: Name or type of antenna used for the 2.4GHz radio interface. For example, "other". - type: str - required: False - antenna_gain: - description: | - Specifies the antenna gain value in decibels (dB) for the 2.4GHz radio interface, valid values range - from 0 to 40. For example, 10. - type: int - required: False - radio_role_assignment: - description: Role assignment mode for the 2.4GHz radio interface. Accepts "Auto", "Client-serving", or "Monitor". For example, Auto. - type: str - required: False - cable_loss: - description: | - Cable loss in dB for the 2.4GHz radio interface. Valid values are from 0 to 40. - This value must be less than the antenna gain. For example, 2. - type: int - required: False - antenna_cable_name: - description: Name or type of antenna cable used for the 2.4GHz radio interface. For example, "other". - type: str - required: False - channel_assignment_mode: - description: Mode of channel assignment for the 2.4GHz radio interface. Accepts "Global" or "Custom". For example, "Custom". 
- type: str - required: False - channel_number: - description: Custom channel number configured for the 2.4GHz radio interface. For example, 6. - type: int - required: False - power_assignment_mode: - description: Mode of power assignment for the 2.4GHz radio interface. Accepts "Global" or "Custom". For example, "Custom". - type: str - required: False - power_level: - description: Custom power level configured for the 2.4GHz radio interface. For example, 3. - type: int - required: False - 5ghz_radio: - description: Configuration options for the 5GHz radio interface. - type: dict - required: False - suboptions: - admin_status: - description: Administrative status for the 5GHz radio interface. For example, "Enabled". - type: str - required: False - antenna_name: - description: Name or type of antenna used for the 5GHz radio interface. For example, "other". - type: str - required: False - antenna_gain: - description: | - Antenna gain value in decibels (dB) for the 5GHz radio interface, valid values range - from 0 to 40. For example, 5. - type: int - required: False - radio_role_assignment: - description: | - Role assignment mode for the 5GHz radio interface. Accepts "Auto", "Client-serving", - or "Monitor". For example, "Auto". This field not required for xor series access point slot 1 - type: str - required: False - cable_loss: - description: | - Cable loss in dB for the 5GHz radio interface. Valid values are from 0 to 40. - This value must be less than the antenna gain. For example, 3. - type: int - required: False - antenna_cable_name: - description: Name or type of antenna cable used for the 5GHz radio interface. For example, "other". - type: str - required: False - channel_assignment_mode: - description: Mode of channel assignment for the 5GHz radio interface. Accepts "Global" or "Custom". For example, "Custom". - type: str - required: False - channel_number: - description: Custom channel number configured for the 5GHz radio interface. For example, 36. - type: int - required: False - power_assignment_mode: - description: Mode of power assignment for the 5GHz radio interface. Accepts "Global" or "Custom". For example, "Custom". - type: str - required: False - power_level: - description: Custom power level configured for the 5GHz radio interface. For example, 3. - type: int - required: False - 6ghz_radio: - description: Configuration options for the 6GHz radio interface. - type: dict - required: False - suboptions: - admin_status: - description: Administrative status for the 6GHz radio interface. For example, "Enabled". - type: str - required: False - antenna_name: - description: Name or type of antenna used for the 6GHz radio interface. For example, "other". - type: str - required: False - antenna_gain: - description: | - Antenna gain value in decibels (dB) for the 6GHz radio interface, valid values range - from 0 to 40. For example, 30. - type: int - required: False - radio_role_assignment: - description: Role assignment mode for the 6GHz radio interface. Accepts "Auto", "Client-serving", or "Monitor". - type: str - required: False - cable_loss: - description: | - Cable loss in dB for the 6GHz radio interface. Valid values are from 0 to 40. - This value must be less than the antenna gain. For example, 10. - type: int - required: False - antenna_cable_name: - description: Name or type of antenna cable used for the 6GHz radio interface. For example, "other". - type: str - required: False - channel_assignment_mode: - description: Mode of channel assignment for the 6GHz radio interface. 
Accepts "Global" or "Custom". For example, "Custom". - type: str - required: False - channel_number: - description: Custom channel number configured for the 6GHz radio interface. For example, 6. - type: int - required: False - power_assignment_mode: - description: Mode of power assignment for the 6GHz radio interface. Accepts "Global" or "Custom". For example, "Custom". - type: str - required: False - power_level: - description: Custom power level configured for the 6GHz radio interface. For example, 3. - type: int - required: False - xor_radio: - description: Configuration options for the XOR radio interface. - type: dict - required: False - suboptions: - admin_status: - description: Administrative status for the XOR radio interface. For example, "Enabled". - type: str - required: False - antenna_name: - description: Name or type of antenna used for the XOR radio interface. For example, "other". - type: str - required: False - antenna_gain: - description: | - Antenna gain value in decibels (dB) for the XOR radio interface, valid values range - from 0 to 40. For example, 14. - type: int - required: False - radio_role_assignment: - description: | - Role assignment mode for the XOR radio interface. Accepts "Auto", "Client-serving", or "Monitor" - If "radio_role_assignment" is set to "Client-serving" only the power level and channel number can be changed. - Additionally, if the 5 GHz band is selected in the radio band, the power level cannot be modified. - For example, "Auto". - type: str - required: False - radio_band: - description: | - Radio band should be enabled if the radio role assignment is set to "Client-serving" mode. - Accepts "2.4 GHz" or "5 GHz" or "6 GHz". - type: str - required: False - cable_loss: - description: | - Cable loss in dB for the XOR radio interface. Valid values are from 0 to 40. - This value must be less than the antenna gain. For example, 5. - type: int - required: False - antenna_cable_name: - description: Name or type of antenna cable used for the XOR radio interface. For example, "other". - type: str - required: False - channel_assignment_mode: - description: | - Mode of channel assignment for the XOR radio interface. Accepts "Global" or "Custom". - - For "Custom" mode and a radio band of "2.4 GHz", valid values are from 1 to 14. - - For "Custom" mode and a radio band of "5 GHz", valid values are - 36, 40, 44, 48, 52, 56, 60, 64, 100, 104, 108, - 112, 116, 120, 124, 128, 132, 136, 140, 144, - 149, 153, 157, 161, 165, 169, 173. - - For "Custom" mode and a radio band of "6 GHz", valid values are - 1, 5, 9, 13, 17, 21, 25, 29, 33, 37, 41, 45, 49, - 53, 57, 61, 65, 69, 73, 77, 81, 85, 89, 93, 97, - 101, 105, 109, 113, 117, 121, 125, 129, 133, 137, - 141, 145, 149, 153, 157, 161, 165, 169, 173, 177, - 181, 185, 189, 193, 197, 201, 205, 209, 213, 217, - 221, 225, 229, 233. - For example, "Custom". - type: str - required: False - channel_number: - description: Custom channel number configured for the XOR radio interface. For example, 6. - type: int - required: False - channel_width: - description: | - Width of the channel configured for the XOR radio interface. Accepts values - "20 MHz", "40 MHz", "80 MHz", "160 MHz" or "320 MHz". For example, 20 MHz. - type: str - required: False - power_assignment_mode: - description: | - Mode of power assignment for the XOR radio interface. Accepts "Global" or "Custom." - In "Custom" mode, valid values range from 1 to 8. 
- type: str - required: False - power_level: - description: Custom power level configured for the XOR radio interface. For example, 3. - type: int - required: False - tri_radio: - description: Configuration options for the TRI radio interface. - type: dict - required: False - suboptions: - admin_status: - description: Administrative status for the TRI radio interface. For example, "Enabled". - type: str - required: False - antenna_name: - description: Name or type of antenna used for the TRI radio interface. For example, "other". - type: str - required: False - antenna_gain: - description: | - Antenna gain value in decibels (dB) for the TRI radio interface, valid values range - from 0 to 40. For example, 16. - type: int - required: False - radio_role_assignment: - description: | - Role assignment mode for the TRI radio interface. Accepts "Auto", "Client-serving", or "Monitor". - If radio_role_assignment is "client-serving", then only power-level and channel-level can be changed. - type: str - required: False - cable_loss: - description: | - Cable loss in dB for the TRI radio interface. Valid values are from 0 to 40. - This value must be less than the antenna gain. For example, 6. - type: int - required: False - antenna_cable_name: - description: Name or type of antenna cable used for the TRI radio interface. For example, "other". - type: str - required: False - channel_assignment_mode: - description: | - Mode of channel assignment for the TRI radio interface. Accepts "Global" or "Custom". - For Custom, it accepts values like 36, 40, 44, 48, 52, 56, 60, 64, 100, 104, 108, 112, 116, 120, 124, 128, - 132, 136, 140, 144, 149, 153, 157, 161, 165, 169, 173. (eg. Custom) - type: str - required: False - channel_number: - description: Custom channel number configured for the TRI radio interface. For example, 6. - type: int - required: False - channel_width: - description: | - Width of the channel configured for the TRI radio interface. Accepts values - "20 MHz", "40 MHz", "80 MHz", "160 MHz", or "320 MHz". . For example, 20 MHz. - type: str - required: False - power_assignment_mode: - description: | - Mode of power assignment for the TRI radio interface. Accepts "Global" or "Custom". - In Custom, it accepts values 1 to 8. - type: str - required: False - power_level: - description: Custom power level configured for the TRI radio interface. For example, 3. - type: int - required: False - dual_radio_mode: - description: | - Mode of operation configured for the TRI radio interface. Specifies how the - access point (AP) manages its dual radio functionality. eg . Auto - type: str - required: False requirements: - dnacentersdk >= 2.7.2 From 41d79e098230f9b32319adadd05248f14eaadac7 Mon Sep 17 00:00:00 2001 From: md-rafeek Date: Mon, 28 Oct 2024 23:00:12 +0530 Subject: [PATCH 10/83] sanity issue changes updated in ap code --- .../modules/accesspoint_workflow_manager.py | 104 ++++++++++++++++++ 1 file changed, 104 insertions(+) diff --git a/plugins/modules/accesspoint_workflow_manager.py b/plugins/modules/accesspoint_workflow_manager.py index 0f603cc0c..da36a0fac 100644 --- a/plugins/modules/accesspoint_workflow_manager.py +++ b/plugins/modules/accesspoint_workflow_manager.py @@ -620,6 +620,110 @@ description: Current AP name that needs to be changed along with the new AP name. For example, "Test2". type: str required: False + common_fields_to_change: + description: | + Common fields to change AP is a dict which contains below data which need to update all listed access points. 
+ type: dict + elements: str + required: True + suboptions: + admin_status: + description: Status of the AP configuration. Accepts "Enabled" or "Disabled". For example, "Enabled". + type: str + required: False + led_status: + description: State of the AP's LED. Accepts "Enabled" or "Disabled". For example, "Enabled". + type: str + required: False + led_brightness_level: + description: Brightness level of the AP's LED. Accepts values from 1 to 8. For example, 3. + type: int + required: False + ap_mode: + description: | + Defines the mode of operation for the Access Point (AP). Possible values include "Local", + "Monitor", "Sniffer", or "Bridge". For example, "Local". + type: str + required: False + location: + description: Location name of the AP. Provide this data if a change is required. For example, "Bangalore". + type: str + required: False + is_assigned_site_as_location: + description: | + Configures whether the access point location is automatically set to the site assigned to the access point. + Accepts "Enabled" or "Disabled". If set to "Enabled", no additional location configuration is required. + type: str + required: False + failover_priority: + description: Priority order for failover in AP configuration. Accepts "Low", "Medium", "High", or "Critical". + type: str + required: False + clean_air_si_2.4ghz: + description: | + Clean Air Spectrum Intelligence (SI) feature status for the 2.4GHz band. Indicates whether. For example, "Enabled". + Clean Air Spectrum Intelligence is enabled or disabled. + type: str + required: False + clean_air_si_5ghz: + description: | + Clean Air Spectrum Intelligence (SI) feature status for the 5GHz band. Indicates whether. For example, "Enabled". + Clean Air Spectrum Intelligence is enabled or disabled. + type: str + required: False + clean_air_si_6ghz: + description: | + Clean Air Spectrum Intelligence (SI) feature status for the 6GHz band. Indicates whether. For example, "Enabled". + Clean Air Spectrum Intelligence is enabled or disabled. + type: str + required: False + primary_controller_name: + description: | + Name or identifier of the primary wireless LAN controller (WLC) managing the Access Point (AP). + For example, "SJ-EWLC-1". + type: str + required: False + primary_ip_address: + description: IP address of the primary wireless LAN controller (WLC) managing the Access Point (AP). + type: dict + required: False + suboptions: + address: + description: IP address of the primary wireless LAN controller. For example, "10.0.0.3". + type: str + required: False + secondary_controller_name: + description: | + Name or identifier of the secondary wireless LAN controller (WLC) managing the Access Point (AP). + To modify only the primary controller, set the secondary and tertiary controller names + to "Inherit from site / Clear". + type: str + required: False + secondary_ip_address: + description: IP address of the secondary wireless LAN controller (WLC) managing the Access Point (AP). + type: dict + required: False + suboptions: + address: + description: IP address of the primary wireless LAN controller. For example, "10.0.0.3". + type: str + required: False + tertiary_controller_name: + description: | + Name or identifier of the tertiary wireless LAN controller (WLC) managing the Access Point (AP). + To modify only the primary controller, set the secondary and tertiary controller names + to "Inherit from site / Clear". 
+ type: str + required: False + tertiary_ip_address: + description: IP address of the tertiary wireless LAN controller (WLC) managing the Access Point (AP). + type: dict + required: False + suboptions: + address: + description: IP address of the primary wireless LAN controller. For example, "10.0.0.2". + type: str + required: False requirements: - dnacentersdk >= 2.7.2 From bae0a576ed0cef07f50dae113f6932a44c4e76e4 Mon Sep 17 00:00:00 2001 From: md-rafeek Date: Mon, 28 Oct 2024 23:10:25 +0530 Subject: [PATCH 11/83] sanity issue changes updated in ap code --- .../modules/accesspoint_workflow_manager.py | 299 +++++++++++++++++- 1 file changed, 298 insertions(+), 1 deletion(-) diff --git a/plugins/modules/accesspoint_workflow_manager.py b/plugins/modules/accesspoint_workflow_manager.py index da36a0fac..c897004e8 100644 --- a/plugins/modules/accesspoint_workflow_manager.py +++ b/plugins/modules/accesspoint_workflow_manager.py @@ -624,7 +624,6 @@ description: | Common fields to change AP is a dict which contains below data which need to update all listed access points. type: dict - elements: str required: True suboptions: admin_status: @@ -724,6 +723,304 @@ description: IP address of the primary wireless LAN controller. For example, "10.0.0.2". type: str required: False + 2.4ghz_radio: + description: Configuration options for the 2.4GHz radio interface. + type: dict + required: False + suboptions: + admin_status: + description: Administrative status for the 2.4GHz radio interface. For example, "Enabled". + type: str + required: False + antenna_name: + description: Name or type of antenna used for the 2.4GHz radio interface. For example, "other". + type: str + required: False + antenna_gain: + description: | + Specifies the antenna gain value in decibels (dB) for the 2.4GHz radio interface, valid values range + from 0 to 40. For example, 10. + type: int + required: False + radio_role_assignment: + description: Role assignment mode for the 2.4GHz radio interface. Accepts "Auto", "Client-serving", or "Monitor". For example, Auto. + type: str + required: False + cable_loss: + description: | + Cable loss in dB for the 2.4GHz radio interface. Valid values are from 0 to 40. + This value must be less than the antenna gain. For example, 2. + type: int + required: False + antenna_cable_name: + description: Name or type of antenna cable used for the 2.4GHz radio interface. For example, "other". + type: str + required: False + channel_assignment_mode: + description: Mode of channel assignment for the 2.4GHz radio interface. Accepts "Global" or "Custom". For example, "Custom". + type: str + required: False + channel_number: + description: Custom channel number configured for the 2.4GHz radio interface. For example, 6. + type: int + required: False + power_assignment_mode: + description: Mode of power assignment for the 2.4GHz radio interface. Accepts "Global" or "Custom". For example, "Custom". + type: str + required: False + power_level: + description: Custom power level configured for the 2.4GHz radio interface. For example, 3. + type: int + required: False + 5ghz_radio: + description: Configuration options for the 5GHz radio interface. + type: dict + required: False + suboptions: + admin_status: + description: Administrative status for the 5GHz radio interface. For example, "Enabled". + type: str + required: False + antenna_name: + description: Name or type of antenna used for the 5GHz radio interface. For example, "other". 
+ type: str + required: False + antenna_gain: + description: | + Antenna gain value in decibels (dB) for the 5GHz radio interface, valid values range + from 0 to 40. For example, 5. + type: int + required: False + radio_role_assignment: + description: | + Role assignment mode for the 5GHz radio interface. Accepts "Auto", "Client-serving", + or "Monitor". For example, "Auto". This field not required for xor series access point slot 1 + type: str + required: False + cable_loss: + description: | + Cable loss in dB for the 5GHz radio interface. Valid values are from 0 to 40. + This value must be less than the antenna gain. For example, 3. + type: int + required: False + antenna_cable_name: + description: Name or type of antenna cable used for the 5GHz radio interface. For example, "other". + type: str + required: False + channel_assignment_mode: + description: Mode of channel assignment for the 5GHz radio interface. Accepts "Global" or "Custom". For example, "Custom". + type: str + required: False + channel_number: + description: Custom channel number configured for the 5GHz radio interface. For example, 36. + type: int + required: False + power_assignment_mode: + description: Mode of power assignment for the 5GHz radio interface. Accepts "Global" or "Custom". For example, "Custom". + type: str + required: False + power_level: + description: Custom power level configured for the 5GHz radio interface. For example, 3. + type: int + required: False + 6ghz_radio: + description: Configuration options for the 6GHz radio interface. + type: dict + required: False + suboptions: + admin_status: + description: Administrative status for the 6GHz radio interface. For example, "Enabled". + type: str + required: False + antenna_name: + description: Name or type of antenna used for the 6GHz radio interface. For example, "other". + type: str + required: False + antenna_gain: + description: | + Antenna gain value in decibels (dB) for the 6GHz radio interface, valid values range + from 0 to 40. For example, 30. + type: int + required: False + radio_role_assignment: + description: Role assignment mode for the 6GHz radio interface. Accepts "Auto", "Client-serving", or "Monitor". + type: str + required: False + cable_loss: + description: | + Cable loss in dB for the 6GHz radio interface. Valid values are from 0 to 40. + This value must be less than the antenna gain. For example, 10. + type: int + required: False + antenna_cable_name: + description: Name or type of antenna cable used for the 6GHz radio interface. For example, "other". + type: str + required: False + channel_assignment_mode: + description: Mode of channel assignment for the 6GHz radio interface. Accepts "Global" or "Custom". For example, "Custom". + type: str + required: False + channel_number: + description: Custom channel number configured for the 6GHz radio interface. For example, 6. + type: int + required: False + power_assignment_mode: + description: Mode of power assignment for the 6GHz radio interface. Accepts "Global" or "Custom". For example, "Custom". + type: str + required: False + power_level: + description: Custom power level configured for the 6GHz radio interface. For example, 3. + type: int + required: False + xor_radio: + description: Configuration options for the XOR radio interface. + type: dict + required: False + suboptions: + admin_status: + description: Administrative status for the XOR radio interface. For example, "Enabled". 
+ type: str + required: False + antenna_name: + description: Name or type of antenna used for the XOR radio interface. For example, "other". + type: str + required: False + antenna_gain: + description: | + Antenna gain value in decibels (dB) for the XOR radio interface, valid values range + from 0 to 40. For example, 14. + type: int + required: False + radio_role_assignment: + description: | + Role assignment mode for the XOR radio interface. Accepts "Auto", "Client-serving", or "Monitor" + If "radio_role_assignment" is set to "Client-serving" only the power level and channel number can be changed. + Additionally, if the 5 GHz band is selected in the radio band, the power level cannot be modified. + For example, "Auto". + type: str + required: False + radio_band: + description: | + Radio band should be enabled if the radio role assignment is set to "Client-serving" mode. + Accepts "2.4 GHz" or "5 GHz" or "6 GHz". + type: str + required: False + cable_loss: + description: | + Cable loss in dB for the XOR radio interface. Valid values are from 0 to 40. + This value must be less than the antenna gain. For example, 5. + type: int + required: False + antenna_cable_name: + description: Name or type of antenna cable used for the XOR radio interface. For example, "other". + type: str + required: False + channel_assignment_mode: + description: | + Mode of channel assignment for the XOR radio interface. Accepts "Global" or "Custom". + - For "Custom" mode and a radio band of "2.4 GHz", valid values are from 1 to 14. + - For "Custom" mode and a radio band of "5 GHz", valid values are + 36, 40, 44, 48, 52, 56, 60, 64, 100, 104, 108, + 112, 116, 120, 124, 128, 132, 136, 140, 144, + 149, 153, 157, 161, 165, 169, 173. + - For "Custom" mode and a radio band of "6 GHz", valid values are + 1, 5, 9, 13, 17, 21, 25, 29, 33, 37, 41, 45, 49, + 53, 57, 61, 65, 69, 73, 77, 81, 85, 89, 93, 97, + 101, 105, 109, 113, 117, 121, 125, 129, 133, 137, + 141, 145, 149, 153, 157, 161, 165, 169, 173, 177, + 181, 185, 189, 193, 197, 201, 205, 209, 213, 217, + 221, 225, 229, 233. + For example, "Custom". + type: str + required: False + channel_number: + description: Custom channel number configured for the XOR radio interface. For example, 6. + type: int + required: False + channel_width: + description: | + Width of the channel configured for the XOR radio interface. Accepts values + "20 MHz", "40 MHz", "80 MHz", "160 MHz" or "320 MHz". For example, 20 MHz. + type: str + required: False + power_assignment_mode: + description: | + Mode of power assignment for the XOR radio interface. Accepts "Global" or "Custom." + In "Custom" mode, valid values range from 1 to 8. + type: str + required: False + power_level: + description: Custom power level configured for the XOR radio interface. For example, 3. + type: int + required: False + tri_radio: + description: Configuration options for the TRI radio interface. + type: dict + required: False + suboptions: + admin_status: + description: Administrative status for the TRI radio interface. For example, "Enabled". + type: str + required: False + antenna_name: + description: Name or type of antenna used for the TRI radio interface. For example, "other". + type: str + required: False + antenna_gain: + description: | + Antenna gain value in decibels (dB) for the TRI radio interface, valid values range + from 0 to 40. For example, 16. + type: int + required: False + radio_role_assignment: + description: | + Role assignment mode for the TRI radio interface. 
Accepts "Auto", "Client-serving", or "Monitor". + If radio_role_assignment is "client-serving", then only power-level and channel-level can be changed. + type: str + required: False + cable_loss: + description: | + Cable loss in dB for the TRI radio interface. Valid values are from 0 to 40. + This value must be less than the antenna gain. For example, 6. + type: int + required: False + antenna_cable_name: + description: Name or type of antenna cable used for the TRI radio interface. For example, "other". + type: str + required: False + channel_assignment_mode: + description: | + Mode of channel assignment for the TRI radio interface. Accepts "Global" or "Custom". + For Custom, it accepts values like 36, 40, 44, 48, 52, 56, 60, 64, 100, 104, 108, 112, 116, 120, 124, 128, + 132, 136, 140, 144, 149, 153, 157, 161, 165, 169, 173. (eg. Custom) + type: str + required: False + channel_number: + description: Custom channel number configured for the TRI radio interface. For example, 6. + type: int + required: False + channel_width: + description: | + Width of the channel configured for the TRI radio interface. Accepts values + "20 MHz", "40 MHz", "80 MHz", "160 MHz", or "320 MHz". . For example, 20 MHz. + type: str + required: False + power_assignment_mode: + description: | + Mode of power assignment for the TRI radio interface. Accepts "Global" or "Custom". + In Custom, it accepts values 1 to 8. + type: str + required: False + power_level: + description: Custom power level configured for the TRI radio interface. For example, 3. + type: int + required: False + dual_radio_mode: + description: | + Mode of operation configured for the TRI radio interface. Specifies how the + access point (AP) manages its dual radio functionality. eg . Auto + type: str + required: False requirements: - dnacentersdk >= 2.7.2 From fb270d33bf8e31fca23603c156715ab03585321b Mon Sep 17 00:00:00 2001 From: md-rafeek Date: Tue, 29 Oct 2024 11:44:26 +0530 Subject: [PATCH 12/83] Typo error on docstring --- plugins/modules/accesspoint_workflow_manager.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/plugins/modules/accesspoint_workflow_manager.py b/plugins/modules/accesspoint_workflow_manager.py index c897004e8..1ee1ec1de 100644 --- a/plugins/modules/accesspoint_workflow_manager.py +++ b/plugins/modules/accesspoint_workflow_manager.py @@ -3941,10 +3941,10 @@ def bulk_ap_update(self, bulk_config): Parameters: self (dict): A dictionary used to collect the execution results. - ap_list (list): A list containing the APs mac address which need to reset or reboot. + bulk_config (dict): A dict containing the APs mac address and cofig details which need to update APs. Returns: - dict: A dictionary containing the result of the access point reset/reboot status. + dict: A dictionary containing the result of the buld access point update status. 
""" ap_exist, ap_details = self.get_accesspoint_details(bulk_config) self.payload["access_point_details"] = ap_details From 884a7dcca5c5f5ae787f8662d8c4c04983c170e9 Mon Sep 17 00:00:00 2001 From: Syed-khadeerahmed Date: Tue, 29 Oct 2024 14:09:20 +0530 Subject: [PATCH 13/83] fixing swim --- playbooks/swim_workflow_manager.yml | 2 +- plugins/modules/swim_workflow_manager.py | 206 ++++++++++++++--------- 2 files changed, 128 insertions(+), 80 deletions(-) diff --git a/playbooks/swim_workflow_manager.yml b/playbooks/swim_workflow_manager.yml index 024689309..481024940 100644 --- a/playbooks/swim_workflow_manager.yml +++ b/playbooks/swim_workflow_manager.yml @@ -24,7 +24,7 @@ config: - tagging_details: image_name: cat9k_iosxe.17.15.01.SPA.bin - device_role: [Distribution] + device_role: ("core","Distribution") device_image_family_name: Cisco Catalyst 9300 Switch # site_name: Global/Chennai/LTTS/FLOOR1 tagging: True diff --git a/plugins/modules/swim_workflow_manager.py b/plugins/modules/swim_workflow_manager.py index d040a1b00..ffe2e5ca3 100644 --- a/plugins/modules/swim_workflow_manager.py +++ b/plugins/modules/swim_workflow_manager.py @@ -1522,61 +1522,80 @@ def get_diff_tagging(self): tag_image_golden = tagging_details.get("tagging") image_name = self.get_image_name_from_id(self.have.get("tagging_image_id")) device_role=tagging_details.get("device_role", "ALL") - - status = "true" - for role in device_role: - image_params = dict( - image_id=self.have.get("tagging_image_id"), - site_id=self.have.get("site_id"), - device_family_identifier=self.have.get("device_family_identifier"), - device_role=role.upper() - ) - + already_un_tagged_device_role = [] + already_tagged_device_role = [] + + for role in device_role.replace('(', '').replace(')', '').replace('"', '').split(','): + image_params = { + "image_id": self.have.get("tagging_image_id"), + "site_id": self.have.get("site_id"), + "device_family_identifier": self.have.get("device_family_identifier"), + "device_role": role.upper() + } + self.log(image_params) + response = self.dnac._exec( family="software_image_management_swim", - function='get_golden_tag_status_of_an_image', + function="get_golden_tag_status_of_an_image", op_modifies=True, params=image_params ) - self.log("Received API response from 'get_golden_tag_status_of_an_image': {0}".format(str(response)), "DEBUG") + self.log(f"Received API response from 'get_golden_tag_status_of_an_image': {str(response)}", "DEBUG") - response = response.get('response') - if response: - image_status = response['taggedGolden'] - if image_status and image_status == tag_image_golden: - self.status = "success" - self.msg = "SWIM Image '{0}' already tagged as Golden image in Cisco Catalyst Center for the device role - {1}".format(image_name,role) - self.log(self.msg, "INFO") - else: - status = "false" + api_response = response.get('response') + if api_response: + image_status = api_response.get('taggedGolden') + + if image_status == tag_image_golden: + msg = f"SWIM Image '{image_name}' already tagged as Golden image in Cisco Catalyst Center" + self.log(msg, "INFO") + already_tagged_device_role.append(role) + if not image_status and image_status == tag_image_golden: - self.status = "success" - self.msg = "SWIM Image '{0}' already un-tagged from Golden image in Cisco Catalyst Cente for the device role - {1}".format(image_name,role) - self.log(self.msg, "INFO") + msg = f"SWIM Image '{image_name}' already un-tagged from Golden image in Cisco Catalyst Center" + self.log(msg, "INFO") + 
already_un_tagged_device_role.append(role) - if status == "true": - self.status = "success" - self.result['changed'] = False - self.msg = "SWIM Image '{0}' already tagged as Golden image in Cisco Catalyst Center for all the roles".format(image_name) - self.result['msg'] = self.msg - self.result['response'] = self.msg - self.log(self.msg, "INFO") - return self + # Check if all roles are tagged as Golden + device_role=tagging_details.get("device_role", "ALL") + device_role_list = device_role.replace('(', '').replace(')', '').replace('"', '').split(',') + device_role = ', '.join(role.strip() for role in device_role_list) + if tag_image_golden: + if len(already_tagged_device_role) == len(device_role.split(',')): + self.status = "success" + self.result['changed'] = False + self.msg = "SWIM Image '{0}' already tagged as Golden image in Cisco Catalyst Center for the roles - {1}.".format(image_name, device_role) + self.result['msg'] = self.msg + self.result['response'] = self.msg + self.log(self.msg, "INFO") + return self + else: + if len(already_un_tagged_device_role) == len(device_role.split(',')): + self.log("inside logic") + self.status = "success" + self.result['changed'] = False + self.msg = "SWIM Image '{0}' already un-tagged as Golden image in Cisco Catalyst Center for the roles - {1}.".format(image_name, device_role) + self.result['msg'] = self.msg + self.result['response'] = self.msg + self.log(self.msg, "INFO") + return self + +#-------------------------------------------------------------------------------------------------------------# if tag_image_golden: - image_params = dict( + image_param = dict( site_id=self.have.get("site_id"), device_family_identifier=self.have.get("device_family_identifier"), device_role="ALL", image_id=self.have.get("tagging_image_id"), ) - self.log("Parameters for un-tagging the image as golden: {0}".format(str(image_params)), "INFO") + self.log("Parameters for un-tagging the image as golden: {0}".format(str(image_param)), "INFO") response = self.dnac._exec( family="software_image_management_swim", function='remove_golden_tag_for_image', op_modifies=True, - params=image_params + params=image_param ) self.log("Received API response from 'remove_golden_tag_for_image': {0}".format(str(response)), "DEBUG") if not response: @@ -1585,7 +1604,7 @@ def get_diff_tagging(self): device_role=tagging_details.get("device_role", "ALL") - for role in device_role: + for role in device_role.replace('(', '').replace(')', '').replace('"', '').split(','): image_params = dict( imageId=self.have.get("tagging_image_id"), siteId=self.have.get("site_id"), @@ -1594,24 +1613,41 @@ def get_diff_tagging(self): ) self.log(f"Parameters for tagging the image as golden for role {role}: {str(image_params)}", "INFO") + image_params = dict( + imageId=self.have.get("tagging_image_id"), + siteId=self.have.get("site_id"), + deviceFamilyIdentifier=self.have.get("device_family_identifier"), + deviceRole=role.upper() + ) + self.log("Parameters for tagging the image as golden: {0}".format(str(image_params)), "INFO") + response = self.dnac._exec( family="software_image_management_swim", function='tag_as_golden_image', op_modifies=True, params=image_params ) - self.log(f"Received API response from 'tag_as_golden_image' for role {role}: {str(response)}", "DEBUG") + self.log("Received API response from 'tag_as_golden_image': {0}".format(str(response)), "DEBUG") + + else: + for role in device_role.replace('(', '').replace(')', '').replace('"', '').split(','): + # Create parameters for the API call + 
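# The normalisation of the playbook's device_role string -- for example
# '("core","Distribution")' -- is repeated on several lines of this change; a small
# helper along the lines below (hypothetical, not part of the patch) expresses the
# same intent in one place and also trims stray whitespace around each role.
def normalize_device_roles(device_role="ALL"):
    cleaned = device_role.replace("(", "").replace(")", "").replace('"', "")
    return [role.strip().upper() for role in cleaned.split(",") if role.strip()]
# normalize_device_roles('("core","Distribution")') returns ['CORE', 'DISTRIBUTION'].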
image_params = { + "image_id": self.have.get("tagging_image_id"), + "site_id": self.have.get("site_id"), + "device_family_identifier": self.have.get("device_family_identifier"), + "device_role": role.upper() + } + self.log(f"Parameters for un-tagging the image as golden for role {role}: {str(image_params)}", "INFO") - else: - self.log("Parameters for un-tagging the image as golden: {0}".format(str(image_params)), "INFO") + response = self.dnac._exec( + family="software_image_management_swim", + function='remove_golden_tag_for_image', + op_modifies=True, + params=image_params + ) + self.log(f"Received API response from 'remove_golden_tag_for_image': {str(response)}", "DEBUG") - response = self.dnac._exec( - family="software_image_management_swim", - function='remove_golden_tag_for_image', - op_modifies=True, - params=image_params - ) - self.log("Received API response from 'remove_golden_tag_for_image': {0}".format(str(response)), "DEBUG") if not response: self.status = "failed" @@ -1623,8 +1659,10 @@ def get_diff_tagging(self): task_details = {} task_id = response.get("response").get("taskId") + device_family=tagging_details.get("device_image_family_name") device_role=tagging_details.get("device_role", "ALL") - device_role = ', '.join(device_role) + device_role_list = device_role.replace('(', '').replace(')', '').replace('"', '').split(',') + device_role = ', '.join(role.strip() for role in device_role_list) site_name = tagging_details.get("site_name") if not site_name: @@ -1635,32 +1673,42 @@ def get_diff_tagging(self): while True: task_details = self.get_task_details(task_id) - if not task_details.get("isError") and 'successful' in task_details.get("progress"): - self.status = "success" - self.result['changed'] = True - self.msg = "Tagging image {0} golden for site {1} for family Cisco Catalyst 9300 Switch for device deviceTag - {2} successful".format(image_name, site_name, device_role) - self.result['msg'] = self.msg - self.result['response'] = self.msg - self.log(self.msg, "INFO") - break - elif task_details.get("isError"): - failure_reason = task_details.get("failureReason", "") - if failure_reason and "An inheritted tag cannot be un-tagged" in failure_reason: - self.status = "failed" - self.result['changed'] = False - self.msg = failure_reason - self.result['msg'] = failure_reason - self.log(self.msg, "ERROR") + if tag_image_golden: + if not task_details.get("isError") and 'successful' in task_details.get("progress"): + self.status = "success" + self.result['changed'] = True + self.msg = "Tagging image {0} golden for site {1} for family {2} for device deviceTag - {3} successful".format(image_name, site_name, device_family, device_role) + self.result['msg'] = self.msg self.result['response'] = self.msg + self.log(self.msg, "INFO") break - else: - error_message = task_details.get("failureReason", "Error: while tagging/un-tagging the golden swim image.") - self.status = "failed" - self.msg = error_message - self.result['msg'] = error_message - self.log(self.msg, "ERROR") + else: + if not task_details.get("isError") and 'successful' in task_details.get("progress"): + self.status = "success" + self.result['changed'] = True + self.msg = "Un-Tagging image {0} golden for site {1} for family {2} for device deviceTag - {3} successful".format(image_name, site_name, device_family, device_role) + self.result['msg'] = self.msg self.result['response'] = self.msg + self.log(self.msg, "INFO") break + elif task_details.get("isError"): + failure_reason = task_details.get("failureReason", "") + if 
failure_reason and "An inheritted tag cannot be un-tagged" in failure_reason: + self.status = "failed" + self.result['changed'] = False + self.msg = failure_reason + self.result['msg'] = failure_reason + self.log(self.msg, "ERROR") + self.result['response'] = self.msg + break + else: + error_message = task_details.get("failureReason", "Error: while tagging/un-tagging the golden swim image.") + self.status = "failed" + self.msg = error_message + self.result['msg'] = error_message + self.log(self.msg, "ERROR") + self.result['response'] = self.msg + break return self @@ -2127,14 +2175,14 @@ def verify_diff_tagged(self): image_name = self.get_image_name_from_id(image_id) device_role = tagging_details.get("device_role", "ALL") - for role in device_role: + for role in device_role.replace('(', '').replace(')', '').replace('"', '').split(','): image_params = dict( image_id=self.have.get("tagging_image_id"), site_id=self.have.get("site_id"), device_family_identifier=self.have.get("device_family_identifier"), - device_role=role.upper() # Ensure role is uppercase + device_role=role.upper() ) - self.log(f"Parameters for checking the status of image: {str(image_params)}", "INFO") + self.log("Parameters for checking the status of image: {0}".format(str(image_params)), "INFO") response = self.dnac._exec( family="software_image_management_swim", @@ -2142,19 +2190,19 @@ def verify_diff_tagged(self): op_modifies=True, params=image_params ) - self.log(f"Received API response from 'get_golden_tag_status_of_an_image': {str(response)}", "DEBUG") + self.log("Received API response from 'get_golden_tag_status_of_an_image': {0}".format(str(response)), "DEBUG") response = response.get('response') if response: - image_status = response.get('taggedGolden') + image_status = response['taggedGolden'] if image_status == tag_image_golden: if tag_image_golden: - self.msg = f"""The requested image '{image_name}' has been tagged as golden in the Cisco Catalyst Center and - its status has been successfully verified.""" + self.msg = """The requested image '{0}' has been tagged as golden in the Cisco Catalyst Center and + its status has been successfully verified.""".format(image_name) self.log(self.msg, "INFO") else: - self.msg = f"""The requested image '{image_name}' has been un-tagged as golden in the Cisco Catalyst Center and - image status has been verified.""" + self.msg = """The requested image '{0}' has been un-tagged as golden in the Cisco Catalyst Center and + image status has been verified.""".format(image_name) self.log(self.msg, "INFO") else: self.log("""Mismatch between the playbook input for tagging/un-tagging image as golden and the Cisco Catalyst Center indicates that @@ -2330,4 +2378,4 @@ def main(): if __name__ == '__main__': - main() + main() \ No newline at end of file From 2eb3edfbb043494f853196c04aa9f2363695099d Mon Sep 17 00:00:00 2001 From: Rugvedi Kapse Date: Tue, 29 Oct 2024 13:27:40 -0700 Subject: [PATCH 14/83] Modified the sanity_tests_devel file to run sanity tests only on the PR changes --- .github/workflows/sanity_tests.yml | 11 +++++++++++ .github/workflows/sanity_tests_devel.yml | 22 ++++++++++++++++++++-- 2 files changed, 31 insertions(+), 2 deletions(-) diff --git a/.github/workflows/sanity_tests.yml b/.github/workflows/sanity_tests.yml index ea96c061f..d5f61e0e0 100644 --- a/.github/workflows/sanity_tests.yml +++ b/.github/workflows/sanity_tests.yml @@ -1,13 +1,16 @@ name: CI + on: push: branches: [main] schedule: - cron: '0 6 * * *' workflow_dispatch: + env: NAMESPACE: cisco COLLECTION_NAME: 
dnac + jobs: # https://docs.ansible.com/ansible/latest/reference_appendices/release_and_maintenance.html#ansible-core-support-matrix # 2.14 supports Python 3.9-3.11 @@ -25,26 +28,34 @@ jobs: - stable-2.16 - stable-2.17 runs-on: ubuntu-22.04 + steps: - name: Check out code uses: actions/checkout@v4 with: path: cisco-en-programmability/dnacenter-ansible + - name: Create directory run: mkdir -p ./ansible_collections/${{env.NAMESPACE}} + - name: Move repository run: mv ./cisco-en-programmability/dnacenter-ansible ./ansible_collections/${{env.NAMESPACE}}/${{env.COLLECTION_NAME}} + - name: Set up Python uses: actions/setup-python@v5 with: python-version: '3.11' + - name: Install ansible-base (${{ matrix.ansible }}) run: pip install https://github.com/ansible/ansible/archive/${{ matrix.ansible }}.tar.gz --disable-pip-version-check + - name: Run sanity tests run: ansible-test sanity --docker -v --color working-directory: ./ansible_collections/${{env.NAMESPACE}}/${{env.COLLECTION_NAME}} + - name: Install yamllint run: pip install --user yamllint + - name: Run yamllint run: yamllint -c .yamllint.yml . working-directory: ./ansible_collections/${{env.NAMESPACE}}/${{env.COLLECTION_NAME}} diff --git a/.github/workflows/sanity_tests_devel.yml b/.github/workflows/sanity_tests_devel.yml index 05cae252e..0736d8180 100644 --- a/.github/workflows/sanity_tests_devel.yml +++ b/.github/workflows/sanity_tests_devel.yml @@ -1,10 +1,13 @@ name: CI Devel + on: workflow_dispatch: pull_request: + env: NAMESPACE: cisco COLLECTION_NAME: dnac + jobs: sanity: name: Sanity (â’¶${{ matrix.ansible }}) @@ -13,26 +16,41 @@ jobs: ansible: - devel runs-on: ubuntu-22.04 + steps: - name: Check out code uses: actions/checkout@v4 with: path: cisco-en-programmability/dnacenter-ansible + - name: Create directory run: mkdir -p ./ansible_collections/${{env.NAMESPACE}} + - name: Move repository run: mv ./cisco-en-programmability/dnacenter-ansible ./ansible_collections/${{env.NAMESPACE}}/${{env.COLLECTION_NAME}} + - name: Set up Python uses: actions/setup-python@v5 with: python-version: '3.11' + - name: Install ansible-base (${{ matrix.ansible }}) run: pip install https://github.com/ansible/ansible/archive/${{ matrix.ansible }}.tar.gz --disable-pip-version-check + + - name: Get changed files + id: changed_files + run: | + git fetch origin main + echo "changed_files=$(git diff --name-only origin/main HEAD)" >> $GITHUB_ENV + echo "Changed files: ${{ env.changed_files }}" + - name: Run sanity tests - run: ansible-test sanity --docker -v --color + run: ansible-test sanity --docker -v --color ${{ env.changed_files }} working-directory: ./ansible_collections/${{env.NAMESPACE}}/${{env.COLLECTION_NAME}} + - name: Install yamllint run: pip install --user yamllint + - name: Run yamllint - run: yamllint -c .yamllint.yml . 
+ run: yamllint -c .yamllint.yml ${{ env.changed_files }} working-directory: ./ansible_collections/${{env.NAMESPACE}}/${{env.COLLECTION_NAME}} From aaa9ab347314265104b984c3b0d1b50b5bc699a6 Mon Sep 17 00:00:00 2001 From: Rugvedi Kapse Date: Tue, 29 Oct 2024 14:57:13 -0700 Subject: [PATCH 15/83] updated the sanity_tests_devel file --- .github/workflows/sanity_tests_devel.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/sanity_tests_devel.yml b/.github/workflows/sanity_tests_devel.yml index 0736d8180..97e744e73 100644 --- a/.github/workflows/sanity_tests_devel.yml +++ b/.github/workflows/sanity_tests_devel.yml @@ -40,6 +40,7 @@ jobs: - name: Get changed files id: changed_files run: | + cd ./ansible_collections/${{env.NAMESPACE}}/${{env.COLLECTION_NAME}} git fetch origin main echo "changed_files=$(git diff --name-only origin/main HEAD)" >> $GITHUB_ENV echo "Changed files: ${{ env.changed_files }}" From 1d3016bda7a7808506d2c92043d5e8e7b17e15ce Mon Sep 17 00:00:00 2001 From: Rugvedi Kapse Date: Tue, 29 Oct 2024 15:01:56 -0700 Subject: [PATCH 16/83] updated the sanity_tests_devel file --- .github/workflows/sanity_tests_devel.yml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/.github/workflows/sanity_tests_devel.yml b/.github/workflows/sanity_tests_devel.yml index 97e744e73..0c1b00cb7 100644 --- a/.github/workflows/sanity_tests_devel.yml +++ b/.github/workflows/sanity_tests_devel.yml @@ -42,8 +42,9 @@ jobs: run: | cd ./ansible_collections/${{env.NAMESPACE}}/${{env.COLLECTION_NAME}} git fetch origin main - echo "changed_files=$(git diff --name-only origin/main HEAD)" >> $GITHUB_ENV - echo "Changed files: ${{ env.changed_files }}" + changed_files=$(git diff --name-only origin/main HEAD) + echo "changed_files=$changed_files" >> $GITHUB_ENV + echo "Changed files: $changed_files" - name: Run sanity tests run: ansible-test sanity --docker -v --color ${{ env.changed_files }} From 72e9968791641422fecadfe32b73f967907f6e6f Mon Sep 17 00:00:00 2001 From: Rugvedi Kapse Date: Tue, 29 Oct 2024 15:10:03 -0700 Subject: [PATCH 17/83] updated the sanity_tests_devel file --- .github/workflows/sanity_tests_devel.yml | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/.github/workflows/sanity_tests_devel.yml b/.github/workflows/sanity_tests_devel.yml index 0c1b00cb7..b2e3ddcb7 100644 --- a/.github/workflows/sanity_tests_devel.yml +++ b/.github/workflows/sanity_tests_devel.yml @@ -43,16 +43,18 @@ jobs: cd ./ansible_collections/${{env.NAMESPACE}}/${{env.COLLECTION_NAME}} git fetch origin main changed_files=$(git diff --name-only origin/main HEAD) - echo "changed_files=$changed_files" >> $GITHUB_ENV + echo "::set-output name=changed_files::$changed_files" echo "Changed files: $changed_files" - name: Run sanity tests - run: ansible-test sanity --docker -v --color ${{ env.changed_files }} + if: steps.changed_files.outputs.changed_files != '' + run: ansible-test sanity --docker -v --color --test ${{ steps.changed_files.outputs.changed_files }} working-directory: ./ansible_collections/${{env.NAMESPACE}}/${{env.COLLECTION_NAME}} - name: Install yamllint run: pip install --user yamllint - name: Run yamllint - run: yamllint -c .yamllint.yml ${{ env.changed_files }} + if: steps.changed_files.outputs.changed_files != '' + run: yamllint -c .yamllint.yml ${{ steps.changed_files.outputs.changed_files }} working-directory: ./ansible_collections/${{env.NAMESPACE}}/${{env.COLLECTION_NAME}} From 90ea7d5eda03e046f16715d6d883f00064d234e1 Mon Sep 17 00:00:00 2001 From: 
Rugvedi Kapse Date: Tue, 29 Oct 2024 15:12:38 -0700 Subject: [PATCH 18/83] updated the sanity_tests_devel file --- .github/workflows/sanity_tests_devel.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/sanity_tests_devel.yml b/.github/workflows/sanity_tests_devel.yml index b2e3ddcb7..4b51ba059 100644 --- a/.github/workflows/sanity_tests_devel.yml +++ b/.github/workflows/sanity_tests_devel.yml @@ -48,7 +48,7 @@ jobs: - name: Run sanity tests if: steps.changed_files.outputs.changed_files != '' - run: ansible-test sanity --docker -v --color --test ${{ steps.changed_files.outputs.changed_files }} + run: ansible-test sanity --docker -v --color --path ${{ steps.changed_files.outputs.changed_files }} working-directory: ./ansible_collections/${{env.NAMESPACE}}/${{env.COLLECTION_NAME}} - name: Install yamllint From 9d6189c0426ba574db054b06ab0f67341da38825 Mon Sep 17 00:00:00 2001 From: md-rafeek Date: Wed, 30 Oct 2024 14:51:14 +0530 Subject: [PATCH 19/83] removed the assign device to site common code removed --- plugins/module_utils/dnac.py | 9 --------- 1 file changed, 9 deletions(-) diff --git a/plugins/module_utils/dnac.py b/plugins/module_utils/dnac.py index 68b08c236..def57b857 100644 --- a/plugins/module_utils/dnac.py +++ b/plugins/module_utils/dnac.py @@ -870,15 +870,6 @@ def assign_device_to_site(self, device_ids, site_name, site_id): Assigns the specified devices to the site. If the assignment is successful, returns True. Otherwise, logs an error and returns False along with error details. """ - site_response = self.get_site(site_name) - if site_response.get("response") and site_response["response"][0].get("type"): - site_type = site_response["response"][0].get("type") - if site_type not in ("building", "floor"): - self.msg = "Device(s) can only be assigned to building/floor" - self.log(self.msg, "ERROR") - self.status = "failed" - self.module.fail_json(msg=self.msg) - if self.get_ccc_version_as_integer() <= self.get_ccc_version_as_int_from_str("2.3.5.3"): try: response = self.dnac_apply['exec']( From 4a9d5f301d3169b72ee619b6a1bbc49eb3ce5a35 Mon Sep 17 00:00:00 2001 From: md-rafeek Date: Wed, 30 Oct 2024 14:51:45 +0530 Subject: [PATCH 20/83] removed the assign device to site common code removed --- plugins/module_utils/dnac.py | 1 + 1 file changed, 1 insertion(+) diff --git a/plugins/module_utils/dnac.py b/plugins/module_utils/dnac.py index def57b857..90e2e438b 100644 --- a/plugins/module_utils/dnac.py +++ b/plugins/module_utils/dnac.py @@ -870,6 +870,7 @@ def assign_device_to_site(self, device_ids, site_name, site_id): Assigns the specified devices to the site. If the assignment is successful, returns True. Otherwise, logs an error and returns False along with error details. 
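The method body that follows keeps the gate on the Catalyst Center release when choosing how to assign devices to a site. A rough sketch of that comparison idea, assuming only the dotted version strings visible in the diff (the stand-in below is not the collection's real helper):

def version_as_tuple(version_string):
    # "2.3.5.3" -> (2, 3, 5, 3); tuples compare element-wise, so ordering just works.
    return tuple(int(part) for part in version_string.split("."))

def uses_legacy_site_assignment(current_version):
    # Mirrors the gate below: releases up to and including 2.3.5.3 take the older path.
    return version_as_tuple(current_version) <= version_as_tuple("2.3.5.3")

# uses_legacy_site_assignment("2.3.7.6") -> False, so the newer assignment flow is used.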
""" + if self.get_ccc_version_as_integer() <= self.get_ccc_version_as_int_from_str("2.3.5.3"): try: response = self.dnac_apply['exec']( From b4d55cd226ab635a20225e9f23920f3812655fa5 Mon Sep 17 00:00:00 2001 From: md-rafeek Date: Sun, 3 Nov 2024 20:06:23 +0530 Subject: [PATCH 21/83] Bug fixed realted to the site --- plugins/modules/pnp_workflow_manager.py | 106 ++++++++++++------------ 1 file changed, 54 insertions(+), 52 deletions(-) diff --git a/plugins/modules/pnp_workflow_manager.py b/plugins/modules/pnp_workflow_manager.py index aff7ee65e..dfa0ef6a2 100644 --- a/plugins/modules/pnp_workflow_manager.py +++ b/plugins/modules/pnp_workflow_manager.py @@ -391,12 +391,13 @@ def validate_input(self): valid_pnp, invalid_params = validate_list_of_dicts( self.config, pnp_spec ) + if invalid_params: self.msg = "Invalid parameters in playbook: {0}".format( "\n".join(invalid_params)) self.log(str(self.msg), "ERROR") - self.status = "failed" - return self + self.set_operation_result("failed", False, self.msg, "ERROR").check_return_status() + self.validated_config = valid_pnp self.msg = "Successfully validated playbook config params: {0}".format(str(valid_pnp)) self.log(str(self.msg), "INFO") @@ -419,29 +420,27 @@ def get_site_details(self): Post creation of the validated input, we this method gets the site_id and checks whether the site exists or not """ - site_exists = False site_id = None response = None try: response = self.get_site(self.want.get("site_name")) + if response: + self.log("Received site details for '{0}': {1}".format(self.want.get("site_name"), + str(response)), "DEBUG") + site = response.get("response") + if len(site) == 1: + site_id = site[0].get("id") + site_exists = True + self.log("Site Name: {1}, Site ID: {0}".format(site_id, + self.want.get("site_name")), "INFO") + return (site_exists, site_id) except Exception: - self.log("Exception occurred as site \ - '{0}' was not found".format(self.want.get("site_name")), "CRITICAL") - self.module.fail_json(msg="Site not found", response=[]) - - if response: - self.log("Received site details \ - for '{0}': {1}".format(self.want.get("site_name"), str(response)), "DEBUG") - site = response.get("response") - if len(site) == 1: - site_id = site[0].get("id") - site_exists = True - self.log("Site Name: {1}, Site ID: {0}".format(site_id, self.want.get("site_name")), "INFO") - - return (site_exists, site_id) + self.msg = "Exception occurred as site '{0}' was not found".format(self.want.get("site_name")) + self.log(self.msg, "CRITICAL") + self.set_operation_result("failed", False, self.msg, "ERROR").check_return_status() def get_site_type(self): """ @@ -461,14 +460,26 @@ def get_site_type(self): try: response = self.get_site(self.want.get("site_name")) + if response: + self.log("Received site details for '{0}': {1}".format(self.want.get("site_name"), + str(response)), "DEBUG") + site = response.get("response") + site_additional_info = site[0].get("additionalInfo") + site_type = None + for item in site_additional_info: + if item["nameSpace"] == "Location": + site_type = item.get("attributes").get("type") + self.log("Site type for site name '{1}' : {0}". 
+ format(site_type, self.want.get("site_name")), "INFO") + return site_type except Exception: - self.log("Exception occurred as \ - site '{0}' was not found".format(self.want.get("site_name")), "CRITICAL") - self.module.fail_json(msg="Site not found", response=[]) + self.msg = "Exception occurred as site '{0}' was not found".format(self.want.get("site_name")) + self.log(self.msg, "CRITICAL") + self.set_operation_result("failed", False, self.msg, "ERROR").check_return_status() if response: - self.log("Received site details\ - for '{0}': {1}".format(self.want.get("site_name"), str(response)), "DEBUG") + self.log("Received site details for '{0}': {1}".format(self.want.get("site_name"), + str(response)), "DEBUG") site = response.get("response") site_additional_info = site[0].get("additionalInfo") for item in site_additional_info: @@ -689,8 +700,8 @@ def get_have(self): if len(self.want.get('pnp_params')) == 1: # check if given device exists in pnp inventory, store device Id device_response = self.get_device_list_pnp(self.want.get("serial_number")) - self.log("Device details for the device with serial \ - number '{0}': {1}".format(self.want.get("serial_number"), self.pprint(device_response)), "DEBUG") + self.log("Device details for the device with serial number '{0}': {1}". + format(self.want.get("serial_number"), self.pprint(device_response)), "DEBUG") if not device_response: self.log("Device with serial number {0} is not found in the inventory".format(self.want.get("serial_number")), "WARNING") @@ -738,8 +749,7 @@ def get_have(self): not self.want.get('pnp_params')[0].get('deviceInfo'): self.msg = "The site name must be a string" self.log(str(self.msg), "ERROR") - self.status = "failed" - return self + self.set_operation_result("failed", False, self.msg, "ERROR").check_return_status() site_name = self.want.get("site_name") (site_exists, site_id) = self.get_site_details() @@ -760,16 +770,14 @@ def get_have(self): self.msg = "The image '{0}' is either not present or not tagged as 'Golden' in the Cisco Catalyst Center."\ " Please verify its existence and its tag status.".format(self.validated_config[0].get("image_name")) self.log(self.msg, "CRITICAL") - self.status = "failed" - return self + self.set_operation_result("failed", False, self.msg, "ERROR").check_return_status() if len(image_list) == 1: if install_mode != "INSTALL": self.msg = "The system must be in INSTALL mode to upgrade the image. The current mode is '{0}'."\ " Please switch to INSTALL mode to proceed.".format(install_mode) self.log(str(self.msg), "CRITICAL") - self.status = "failed" - return self + self.set_operation_result("failed", False, self.msg, "ERROR").check_return_status() have["image_id"] = image_list[0].get("imageUuid") self.log("Image ID for the image '{0}': {1}".format(self.want.get('image_params').get('image_name'), str(have["image_id"])), "INFO") @@ -780,8 +788,7 @@ def get_have(self): self.msg = "Either project not found"\ " or it is Empty." 
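# --- Illustrative sketch, not taken from this patch ------------------------
# Every "self.status = 'failed'; return self" branch in this module is being
# replaced by the chained helper call
#     self.set_operation_result("failed", False, self.msg, "ERROR").check_return_status()
# A condensed approximation of that pair is shown below; the real
# implementation lives in plugins/module_utils/dnac.py, and
# check_return_status() is paraphrased here rather than copied.
def set_operation_result(self, operation_status, is_changed, status_message,
                         log_level, additional_info=None):
    self.status = operation_status
    self.msg = status_message
    self.result.update({
        "status": operation_status,
        "msg": status_message,
        "response": additional_info or status_message,
        "changed": is_changed,
        "failed": operation_status == "failed",
    })
    self.log(status_message, log_level)
    return self  # returning self is what makes the chaining work

def check_return_status(self):
    # Abort the module run as soon as a failure has been recorded.
    if self.status == "failed":
        self.module.fail_json(msg=self.msg, response=self.result.get("response"))
    return self
# ---------------------------------------------------------------------------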
self.log(self.msg, "CRITICAL") - self.status = "failed" - return self + self.set_operation_result("failed", False, self.msg, "ERROR").check_return_status() template_details = get_dict_result(template_list, 'name', template_name) if template_details: @@ -789,15 +796,13 @@ def get_have(self): else: self.msg = "Template '{0}' is not found.".format(template_name) self.log(self.msg, "CRITICAL") - self.status = "failed" - return self + self.set_operation_result("failed", False, self.msg, "ERROR").check_return_status() else: if not self.want.get('pnp_params')[0].get('deviceInfo'): self.msg = "Either Site Name or Device details must be added." self.log(self.msg, "ERROR") - self.status = "failed" - return self + self.set_operation_result("failed", False, self.msg, "ERROR").check_return_status() self.msg = "Successfully collected all project and template \ parameters from Cisco Catalyst Center for comparison" @@ -880,8 +885,7 @@ class instance for further use. if not isinstance(self.want.get("pnp_params"), list): self.msg = "Device Info must be passed as a list" self.log(self.msg, "ERROR") - self.status = "failed" - return self + self.set_operation_result("failed", False, self.msg, "ERROR").check_return_status() if len(self.want.get("pnp_params")) > 1: devices_added = [] @@ -922,8 +926,7 @@ class instance for further use. self.msg = "Bulk import failed" self.log(self.msg, "CRITICAL") - self.status = "failed" - return self + self.set_operation_result("failed", False, self.msg, "ERROR").check_return_status() provisioned_count_params = { "serial_number": self.want.get("serial_number"), @@ -939,8 +942,7 @@ class instance for further use. if not self.want['pnp_params']: self.msg = "Device needs to be added before claiming. Please add device_info" self.log(self.msg, "ERROR") - self.status = "failed" - return self + self.set_operation_result("failed", False, self.msg, "ERROR").check_return_status() if not self.want["site_name"]: self.log("Adding device to pnp database", "INFO") @@ -957,7 +959,7 @@ class instance for further use. else: self.msg = "Device Addition Failed" self.log(self.result['msg'], "CRITICAL") - self.status = "failed" + self.set_operation_result("failed", False, self.msg, "ERROR").check_return_status() return self @@ -983,7 +985,7 @@ class instance for further use. 
else: self.msg = "Device Claim Failed" self.log(self.result['msg'], "CRITICAL") - self.status = "failed" + self.set_operation_result("failed", False, self.msg, "ERROR").check_return_status() return self @@ -1222,7 +1224,7 @@ def get_device_list_pnp(self, serial_number): except Exception as e: msg = "An error occurred while retrieving device with serial number {0}: {1}".format(serial_number, str(e)) self.log(msg + str(e), "WARNING") - self.module.fail_json(msg=msg) + self.set_operation_result("failed", False, msg, "ERROR").check_return_status() def get_device_by_id_pnp(self, device_id): """ @@ -1254,12 +1256,12 @@ def get_device_by_id_pnp(self, device_id): # If no device found, raise an error msg = "No device found with device id: {0}".format(device_id) self.log(msg, "WARNING") - self.module.fail_json(msg=msg) + self.set_operation_result("failed", False, msg, "ERROR").check_return_status() except Exception as e: msg = "An error occurred while retrieving device with device id {0}: {1}".format(device_id, str(e)) self.log(msg + str(e), "WARNING") - self.module.fail_json(msg=msg) + self.set_operation_result("failed", False, msg, "ERROR").check_return_status() def add_pnp_device(self, pnp_params): """ @@ -1296,7 +1298,7 @@ def add_pnp_device(self, pnp_params): except Exception as e: msg = "Unable to add the PNP device with parameters: {0}. Error: {1}".format(pnp_params, str(e)) self.log(msg + str(e), "WARNING") - self.module.fail_json(msg=msg) + self.set_operation_result("failed", False, msg, "ERROR").check_return_status() def pnp_device_count(self, pnp_params): """ @@ -1326,12 +1328,12 @@ def pnp_device_count(self, pnp_params): # If the response is empty, log a warning msg = "No response received when trying to get the PNP device count for parameters: {0}".format(pnp_params) self.log(msg, "WARNING") - self.module.fail_json(msg=msg) + self.set_operation_result("failed", False, msg, "ERROR").check_return_status() except Exception as e: msg = "Unable to get the PNP device count for parameters: {0}. Error: {1}".format(pnp_params, str(e)) self.log(msg + str(e), "WARNING") - self.module.fail_json(msg=msg) + self.set_operation_result("failed", False, msg, "ERROR").check_return_status() def claim_device_site(self, claim_params): """ @@ -1362,12 +1364,12 @@ def claim_device_site(self, claim_params): # If the response is empty, log a warning msg = "No response received when trying to claim the device to site with parameters: {0}".format(claim_params) self.log(msg, "WARNING") - self.module.fail_json(msg=msg) + self.set_operation_result("failed", False, msg, "ERROR").check_return_status() except Exception as e: msg = "Unable to claim the device to site with parameters: {0}. 
Error: {1}".format(claim_params, str(e)) self.log(msg + str(e), "WARNING") - self.module.fail_json(msg=msg) + self.set_operation_result("failed", False, msg, "ERROR").check_return_status() def main(): From cab2119caa572a33e84016f9a2e74d13c2e2b20f Mon Sep 17 00:00:00 2001 From: MUTHU-RAKESH-27 <19cs127@psgitech.ac.in> Date: Tue, 5 Nov 2024 16:55:05 +0530 Subject: [PATCH 22/83] Fixed the issue with tcp_mss_adjustment, update issue with localAutonomousSystemNumber, import_external_routes and is_default_exit and L2 handoff for a BN --- .../sda_fabric_devices_workflow_manager.py | 65 ++++++++++++++----- 1 file changed, 50 insertions(+), 15 deletions(-) diff --git a/plugins/modules/sda_fabric_devices_workflow_manager.py b/plugins/modules/sda_fabric_devices_workflow_manager.py index 3e3416300..b6d80b330 100644 --- a/plugins/modules/sda_fabric_devices_workflow_manager.py +++ b/plugins/modules/sda_fabric_devices_workflow_manager.py @@ -130,14 +130,14 @@ is_default_exit: description: - Indicates whether this Border Node serves as the default gateway for traffic exiting the virtual network. - - The `is_default_exit` can be updated. + - The `is_default_exit` cannot be updated. type: bool default: true import_external_routes: description: - Determines whether routes from external networks are imported into the fabric. - Enhances security by limiting route usage to internal routes. - - The 'import_external_routes' can be updated. + - The 'import_external_routes' cannot be updated. type: bool default: true border_priority: @@ -2442,7 +2442,7 @@ def get_device_params(self, fabric_id, network_id, device_details, config_index) network_id (str): The Id of the network device. device_details (dict): Playbook details containing fabric devices details along with the Border Settings, L2 Handoff, L3 SDA Handoff, L3 IP Handoff information. - config_index (int) - Pointer to the device_config elements in the playbook. + config_index (int): Pointer to the device_config elements in the playbook. Returns: device_info (dict): The processed device details from the user playbook. 
Description: @@ -2523,8 +2523,10 @@ def get_device_params(self, fabric_id, network_id, device_details, config_index) have_border_settings = None # Get the border settings details from the Cisco Catalyst Center, if available - if not borders_settings: + if have_device_details: have_border_settings = have_device_details.get("borderDeviceSettings") + + if not borders_settings: if not have_border_settings: self.msg = ( "The parameter 'border_settings' is mandatory when the 'device_roles' has 'BORDER_NODE' " @@ -2536,7 +2538,10 @@ def get_device_params(self, fabric_id, network_id, device_details, config_index) device_info.update({ "borderDeviceSettings": have_border_settings }) - self.log("Border settings retrieved from existing data: {}".format(have_border_settings), "DEBUG") + self.log( + "Border settings retrieved from existing data: {have_border_settings}" + .format(have_border_settings=have_border_settings), "DEBUG" + ) return device_info self.log("Processing user-provided border settings", "DEBUG") @@ -2581,8 +2586,8 @@ def get_device_params(self, fabric_id, network_id, device_details, config_index) if "LAYER_3" in border_types: if not (layer3_settings or have_layer3_settings): self.msg = ( - "The parameter 'border_settings' is mandatory when the 'device_roles' has 'BORDER_NODE' " - "for the device {ip}.".format(ip=device_ip) + "The parameter 'layer3_settings' is mandatory under 'borders_settings' when the " + "'device_roles' has 'BORDER_NODE' for the device {ip}.".format(ip=device_ip) ) self.status = "failed" return self.check_return_status() @@ -2599,27 +2604,55 @@ def get_device_params(self, fabric_id, network_id, device_details, config_index) ) self.status = "failed" return self.check_return_status() + else: + if have_layer3_settings and (str(local_autonomous_system_number) != str(have_layer3_settings.get("localAutonomousSystemNumber"))): + self.msg = ( + "The parameter 'local_autonomous_system_number' under 'layer3_settings' should not be " + "updated for the device with IP '{ip}'.".format(ip=device_ip) + ) + self.status = "failed" + return self.check_return_status() self.validate_local_autonomous_system_number(local_autonomous_system_number, device_ip) self.log( "Successfully validated 'local_autonomous_system_number': {asn_number}" .format(asn_number=local_autonomous_system_number), "DEBUG" ) - is_default_exit = layer3_settings.get("layer3_settings") - if not is_default_exit: + is_default_exit = layer3_settings.get("is_default_exit") + if is_default_exit is None: if have_layer3_settings: have_is_default_exit = have_layer3_settings.get("isDefaultExit") is_default_exit = have_is_default_exit else: is_default_exit = True + else: + if have_layer3_settings: + have_is_default_exit = have_layer3_settings.get("importExternalRoutes") + if is_default_exit != have_is_default_exit: + self.msg = ( + "The parameter 'is_default_exit' under 'layer3_settings' should not be " + "updated for the device with IP '{ip}'.".format(ip=device_ip) + ) + self.status = "failed" + return self.check_return_status() import_external_routes = layer3_settings.get("import_external_routes") - if not import_external_routes: + if import_external_routes is None: if have_layer3_settings: have_import_external_routes = have_layer3_settings.get("importExternalRoutes") import_external_routes = have_import_external_routes else: import_external_routes = True + else: + if have_layer3_settings: + have_import_external_routes = have_layer3_settings.get("importExternalRoutes") + if import_external_routes != 
have_import_external_routes: + self.msg = ( + "The parameter 'import_external_routes' under 'layer3_settings' should not be " + "updated for the device with IP '{ip}'.".format(ip=device_ip) + ) + self.status = "failed" + return self.check_return_status() border_priority = layer3_settings.get("border_priority") # Default value of border priority is 10 @@ -3107,7 +3140,7 @@ def get_sda_l3_handoff_params(self, fabric_id, network_id, device_details, devic ) return sda_l3_handoff_info - def validate_layer3_handoff_ip_transit(self, item, device_ip, is_ip_l3_handoff_exists, have_ip_l3_handoff): + def validate_layer3_handoff_ip_transit(self, item, device_ip, is_ip_l3_handoff_exists, have_ip_l3_handoff, l3_ip_handoff_index): """ Validate Layer 3 handoff IP transit parameters. @@ -3116,6 +3149,7 @@ def validate_layer3_handoff_ip_transit(self, item, device_ip, is_ip_l3_handoff_e device_ip (str): The device IP address. is_ip_l3_handoff_exists (int): The existence of the L3 handoff item. have_ip_l3_handoff (dict): Existing L3 handoff details for the device. + l3_ip_handoff_index (int): Index for the current item in the 'have_ip_l3_handoff'. Returns: tuple: A tuple containing transit_id, interface_name, virtual_network_name, vlan_id, tcp_mss_adjustment @@ -3217,9 +3251,9 @@ def validate_layer3_handoff_ip_transit(self, item, device_ip, is_ip_l3_handoff_e self.log(self.msg, "ERROR") return (None, None, None, None, None, False) elif virtual_network_name and (not vlan_id): - vlan_id = have_ip_l3_handoff.get("vlanId") + vlan_id = have_ip_l3_handoff[l3_ip_handoff_index].get("vlanId") elif vlan_id and (not virtual_network_name): - virtual_network_name = have_ip_l3_handoff.get("virtualNetworkName") + virtual_network_name = have_ip_l3_handoff[l3_ip_handoff_index].get("virtualNetworkName") else: if not (virtual_network_name and vlan_id): self.msg = ( @@ -3234,7 +3268,7 @@ def validate_layer3_handoff_ip_transit(self, item, device_ip, is_ip_l3_handoff_e tcp_mss_adjustment = item.get("tcp_mss_adjustment") if not tcp_mss_adjustment: if is_ip_l3_handoff_exists: - have_tcp_mss_adjustment = have_ip_l3_handoff.get("tcpMssAdjustment") + have_tcp_mss_adjustment = have_ip_l3_handoff[l3_ip_handoff_index].get("tcpMssAdjustment") if have_tcp_mss_adjustment: tcp_mss_adjustment = have_tcp_mss_adjustment else: @@ -3322,7 +3356,8 @@ def get_ip_l3_handoff_params(self, fabric_id, network_id, device_details, device (transit_id, interface_name, virtual_network_name, vlan_id, tcp_mss_adjustment, is_valid) = \ self.validate_layer3_handoff_ip_transit( - item, device_details.get("device_ip"), is_ip_l3_handoff_exists, have_ip_l3_handoff + item, device_details.get("device_ip"), is_ip_l3_handoff_exists, + have_ip_l3_handoff, l3_ip_handoff_index ) if not is_valid: From 33ccf55d6e7c0b15ffe4d87bd2a64cd61af6e0e0 Mon Sep 17 00:00:00 2001 From: md-rafeek Date: Tue, 5 Nov 2024 22:59:37 +0530 Subject: [PATCH 23/83] Addressed all 37 comments --- plugins/module_utils/dnac.py | 5 +- .../modules/accesspoint_workflow_manager.py | 268 ++++++++++++------ 2 files changed, 179 insertions(+), 94 deletions(-) diff --git a/plugins/module_utils/dnac.py b/plugins/module_utils/dnac.py index 74e0b42ba..e89c71917 100644 --- a/plugins/module_utils/dnac.py +++ b/plugins/module_utils/dnac.py @@ -1769,10 +1769,9 @@ def set_operation_result(self, operation_status, is_changed, status_message, log self.result.update({ "status": operation_status, "msg": status_message, - "response": status_message, + "response": additional_info or status_message, "changed": is_changed, - 
"failed": operation_status == "failed", - "data": additional_info or {} # Include additional_info if provided, else an empty dictionary + "failed": operation_status == "failed" }) # Log the message at the specified log level diff --git a/plugins/modules/accesspoint_workflow_manager.py b/plugins/modules/accesspoint_workflow_manager.py index 1ee1ec1de..67f5919fb 100644 --- a/plugins/modules/accesspoint_workflow_manager.py +++ b/plugins/modules/accesspoint_workflow_manager.py @@ -572,12 +572,12 @@ required: false bulk_update_aps: description: | - Bulk update to modify one or more access points (APs) of the same series, - identified by their MAC address, hostname, or management IP address. - At least one of the following parameters is required: + Perform a bulk update on multiple access points (APs) of the same series, + identified by one or more of the following parameters: - mac_address - hostname - management_ip_address + At least one of these parameters must be specified to identify the APs for updating. type: dict required: false suboptions: @@ -1958,15 +1958,14 @@ def get_diff_merged(self, ap_config): if task_details_response.get("endTime") is not None: if task_details_response.get("status") == "FAILURE": self.result["changed"] = True if self.result["changed"] is True else False - self.status = "failed" self.msg = "Unable to get success response, hence AP config not updated" self.log(self.msg, "ERROR") self.log("Task Details: {0} .".format(self.pprint( task_details_response)), "ERROR") - responses["accesspoints_updates"] = { - "ap_update_config_task_details": self.get_task_details_by_id(task_response["response"]["taskId"]), - "ap_config_update_status": self.msg} - self.module.fail_json(msg=self.msg, response=responses) + failure_details = self.get_task_details_by_id(task_response["response"]["taskId"]) + self.log("Failure Details: {0} .".format(self.pprint(failure_details)), "ERROR") + self.set_operation_result("failed", self.result["changed"], + self.msg, "ERROR", failure_details).check_return_status() else: self.result["changed"] = True self.result["ap_update_status"] = True @@ -2211,39 +2210,46 @@ def validate_ap_config_parameters(self, ap_config): ap_identifier = ap_config.get("ap_identifier") common_fields_to_change = ap_config.get("common_fields_to_change") + self.log("Processing AP configuration. ap_identifier: {0}, common_fields_to_change: {1}". + format(ap_identifier, common_fields_to_change), "DEBUG") if ap_identifier is not None: for each_ap in ap_identifier: + self.log("Validating AP entry: {0}".format(each_ap), "DEBUG") mac_address = each_ap.get("mac_address") if mac_address: - mac_regex = re.compile(r'^([0-9A-Fa-f]{2}[:-]){5}([0-9A-Fa-f]{2})$') - if not mac_regex.match(mac_address): - errormsg.append("mac_address: Invalid MAC Address '{0}' in playbook.".format(mac_address)) + self.validate_mac_address(mac_address, errormsg) management_ip_address = each_ap.get("management_ip_address") - if management_ip_address and (not self.is_valid_ipv4(management_ip_address) and - not self.is_valid_ipv6(management_ip_address)): - errormsg.append("management_ip_address: Invalid Management IP Address '{0}' in playbook." 
- .format(management_ip_address)) + if management_ip_address: + self.validate_ip_address(management_ip_address, "management_ip_address", + errormsg) hostname = each_ap.get("hostname") if hostname: + self.log("Validating Hostname: {0}".format(hostname), "DEBUG") param_spec = dict(type="str", length_max=32) validate_str(hostname, param_spec, "hostname", errormsg) self.log("Hostname validation for '{0}' completed.".format(hostname), "INFO") ap_name = each_ap.get("ap_name") if ap_name: + self.log("Validating AP Name: {0}".format(ap_name), "DEBUG") param_spec = dict(type="str", length_max=32) validate_str(ap_name, param_spec, "ap_name", errormsg) if re.search(r'[ ?<]', ap_name): - errormsg.append("ap_name: Invalid '{0}' in playbook. Space, '?', '<' and XSS characters are not allowed".format(ap_name)) - + errormsg.append("ap_name: Invalid '{0}' in playbook. Space, '?', '<' and XSS characters are not allowed". + format(ap_name)) + else: + self.log("AP Name '{0}' is valid.".format(ap_name), "INFO") if common_fields_to_change is not None: ap_config = common_fields_to_change + self.log("Updated ap_config with common_fields_to_change: {0}".format(common_fields_to_change), "DEBUG") invalid_series = self.validate_radio_series(ap_config) if invalid_series: errormsg.append(invalid_series) + else: + self.log("Radio series validation completed with no errors.", "INFO") mac_address = ap_config.get("mac_address") if mac_address: @@ -2258,9 +2264,12 @@ def validate_ap_config_parameters(self, ap_config): errormsg.append("management_ip_address: Invalid Management IP Address '{0}'\ in playbook.".format(management_ip_address)) - if ap_config.get("rf_profile"): + rf_profile = ap_config.get("rf_profile") + if rf_profile: + self.log("Validating RF profile: {0}".format(rf_profile), "DEBUG") param_spec = dict(type="str", length_max=32) - validate_str(ap_config["rf_profile"], param_spec, "rf_profile", errormsg) + validate_str(rf_profile, param_spec, "rf_profile", errormsg) + self.log("RF profile '{0}' validation completed.".format(rf_profile), "DEBUG") site = ap_config.get("site") if site: @@ -2370,6 +2379,50 @@ def validate_ap_config_parameters(self, ap_config): self.status = "success" return self + def validate_mac_address(self, mac_address, errormsg): + """ + Validates the provided MAC address. + + Parameters: + - self (object): An instance of the class containing the method. + - mac_address (str): The MAC address string to validate in the input configuration. + - errormsg (list): List contain error message of the mac address error. + + Returns: + An error message if validation fails; otherwise, returns Nothing + + Description: + This helper function validates the MAC address provided with the specified field name. + If the MAC address is invalid, it returns an error message containing with field name. + """ + self.log("Validating MAC address: {0}".format(mac_address), "INFO") + mac_regex = re.compile(r'^([0-9A-Fa-f]{2}[:-]){5}([0-9A-Fa-f]{2})$') + + if mac_address and not mac_regex.match(mac_address): + errormsg.append("mac_address: Invalid MAC Address '{0}' in playbook.".format(mac_address)) + else: + self.log("MAC address '{0}' is valid.".format(mac_address), "INFO") + + def validate_ip_address(self, ip_address, ip_type, errormsg): + """ + Validates the provided IP Address. + + Parameters: + - self (object): An instance of the class containing the method. + - ip_address (str): The IP address string to validate in the input configuration. 
+ - ip_type (str): String contain field name needs to display in the error message + - errormsg (list): List contain error message of the mac address error. + + Returns: + An error message if validation fails; otherwise, returns Nothing + """ + self.log("Validating {0}: {1}".format(ip_type, ip_address), "DEBUG") + + if ip_address and not (self.is_valid_ipv4(ip_address) or self.is_valid_ipv6(ip_address)): + errormsg.append("{0}: Invalid {0} '{1}' in playbook.".format(ip_type, ip_address)) + else: + self.log("{0}: '{1}' is valid.".format(ip_type, ip_address), "INFO") + def validate_radio_parameters(self, radio_config, radio_series, errormsg): """ Validate the Radio configuration each parameter. @@ -2608,23 +2661,31 @@ def get_accesspoint_details(self, input_config): input_param[self.keymap[key]] = input_config[key] break - if input_config.get("ap_identifier"): + ap_identifier = input_config.get("ap_identifier") + + if ap_identifier: ap_list = [] selected_key = None - for each_ap in input_config.get("ap_identifier"): + self.log("Starting to process AP identifiers from input configuration.", "DEBUG") + + for each_ap in ap_identifier: for key in ["mac_address", "management_ip_address", "hostname"]: if each_ap.get(key): ap_list.append(each_ap[key]) selected_key = key + self.log("Selected key '{0}' with value '{1}' for AP identifier.". + format(key, each_ap[key]), "DEBUG") break input_param[self.keymap[selected_key]] = ap_list self.log("At AP details {0}".format(self.pprint(input_param))) + self.log("Completed AP identifier processing. Mapped {0} to input_param with values: {1}". + format(self.keymap[selected_key], ap_list), "DEBUG") + self.log("Final AP details structure: {0}".format(self.pprint(input_param)), "DEBUG") if not input_param: msg = "Required param of mac_address,ip_address or hostname is not in playbook config" self.log(msg, "WARNING") - self.module.fail_json(msg=msg, response=msg) - return (accesspoint_exists, current_configuration) + self.set_operation_result("failed", False, msg, "ERROR").check_return_status() try: ap_response = self.dnac._exec( @@ -2634,14 +2695,19 @@ def get_accesspoint_details(self, input_config): params=input_param, ) - if ap_response and ap_response.get("response") and not input_config.get("ap_identifier"): - ap_response = self.camel_to_snake_case(ap_response["response"]) - accesspoint_exists = True - current_configuration = ap_response[0] - elif ap_response and ap_response.get("response") and input_config.get("ap_identifier"): - ap_response = self.camel_to_snake_case(ap_response["response"]) + ap_response_data = ap_response.get("response") if ap_response else None + ap_identifier_present = input_config.get("ap_identifier") + + if ap_response_data: + ap_response = self.camel_to_snake_case(ap_response_data) accesspoint_exists = True + if not ap_identifier_present: + current_configuration = ap_response[0] + self.log("AP response found without 'ap_identifier'; current configuration set.", "DEBUG") + else: + self.log("AP response found with 'ap_identifier'; processed AP response data.", "DEBUG") + except Exception as e: self.msg = "The provided device '{0}' is either invalid or not present in the \ Cisco Catalyst Center.".format(str(input_param)) @@ -2650,12 +2716,12 @@ def get_accesspoint_details(self, input_config): if not accesspoint_exists: self.msg = "The provided device '{0}' is either invalid or not present in the \ Cisco Catalyst Center.".format(str(input_param)) - self.module.fail_json(msg="MAC Address not exist:", response=str(self.msg)) + 
self.set_operation_result("failed", False, self.msg, "ERROR").check_return_status() else: - if not input_config.get("ap_identifier") and current_configuration["family"] != "Unified AP": - self.msg = "Provided device is not Access Point." - self.module.fail_json(msg="MAC Address is not Access point") - elif input_config.get("ap_identifier"): + ap_identifier_present = input_config.get("ap_identifier") + is_unified_ap = current_configuration.get("family") == "Unified AP" + + if ap_identifier_present: filter_response = [] for each_response in ap_response: if each_response["family"] != "Unified AP": @@ -2666,6 +2732,10 @@ def get_accesspoint_details(self, input_config): self.log("Filtered Access points List: {0} ".format(self.pprint(filter_response)), "INFO") return accesspoint_exists, filter_response + if not is_unified_ap: + self.msg = "Provided device is not Access Point." + self.set_operation_result("failed", False, self.msg, "ERROR").check_return_status() + return accesspoint_exists, current_configuration def get_current_config(self, input_config): @@ -2850,14 +2920,13 @@ def site_exists(self, input_config): msg = "The provided site name '{0}' is either invalid or not present in the \ Cisco Catalyst Center.".format(self.want.get("site_name")) self.log(msg, "WARNING") - self.module.fail_json(msg=msg) + self.set_operation_result("failed", False, msg, "ERROR").check_return_status() except Exception as e: msg = "The provided site name '{0}' is either invalid or not present in the \ Cisco Catalyst Center.".format(self.want.get("site_name")) self.log(msg + str(e), "WARNING") - self.module.fail_json(msg=msg) - return site_exists, None + self.set_operation_result("failed", False, msg, "ERROR").check_return_status() return site_exists, current_site @@ -2940,7 +3009,8 @@ def verify_ap_provision(self, wlc_ip_address): self.log(msg + str(e), "ERROR") provision_details = str(e) self.status = "failed" - self.module.fail_json(msg=msg, response=provision_details) + self.set_operation_result("failed", False, msg, "ERROR", + provision_details).check_return_status() return provision_status, provision_details @@ -2969,7 +3039,7 @@ def access_point_provision_old(self, rf_profile, hostname, type_name, site_name_ "site_name_hierarchy: {0}, rf_profile: {1}, host_name: {2}" .format(site_name_hierarchy, rf_profile, hostname)) self.log(error_msg, "ERROR") - self.module.fail_json(msg=error_msg) + self.set_operation_result("failed", False, error_msg, "ERROR").check_return_status() provision_params = [{ "rfProfile": rf_profile, @@ -3014,7 +3084,7 @@ def access_point_provision_new(self, rf_profile, device_id, site_id): "device_id: {0}, rf_profile: {1}, site_id: {2}" .format(device_id, rf_profile, site_id)) self.log(error_msg, "ERROR") - self.module.fail_json(msg=error_msg) + self.set_operation_result("failed", False, error_msg, "ERROR").check_return_status() provision_params = { "rfProfileName": rf_profile, @@ -3044,8 +3114,7 @@ def access_point_provision_new(self, rf_profile, device_id, site_id): except Exception as e: error_msg = 'An error occurred during device provisioning: {0}'.format(str(e)) self.log(error_msg, "ERROR") - self.msg = error_msg - self.module.fail_json(msg=self.msg) + self.set_operation_result("failed", False, error_msg, "ERROR").check_return_status() def provision_device(self): """ @@ -3090,8 +3159,9 @@ def provision_device(self): provision_details = execution_details break if execution_details.get("bapiError"): - self.module.fail_json(msg=execution_details.get("bapiError"), - 
response=execution_details) + msg=execution_details.get("bapiError") + self.set_operation_result("failed", False, msg, "ERROR", + execution_details).check_return_status() break time.sleep(resync_retry_interval) @@ -3121,20 +3191,15 @@ def provision_device(self): break else: self.result['changed'] = True if self.result['changed'] is True else False - self.status = "failed" self.msg = "Unable to get success response, hence not provisioned" self.log(self.msg, "ERROR") self.log("Task Details: {0} .".format(self.pprint( task_details_response)), "ERROR") provision_details = self.get_task_details_by_id(task_id) - responses["accesspoints_updates"] = { - "ap_provision_task_details": { - "error_code": provision_details.get("errorCode"), - "failure_reason": provision_details.get("failureReason"), - "data": provision_details.get("data") - }, - "ap_provision_status": self.msg} - self.module.fail_json(msg=self.msg, response=responses) + self.log("Provision error details: {0} .".format(self.pprint( + provision_details)), "ERROR") + self.set_operation_result("failed", self.result['changed'], + self.msg, "ERROR", provision_details).check_return_status() time.sleep(resync_retry_interval) resync_retry_count = resync_retry_count - 1 @@ -3410,21 +3475,36 @@ def update_ap_configuration(self, ap_config): if ap_config["adminStatus"] == "Enabled" else False if not ap_config.get("bulk_update"): - if ap_config.get(self.keymap["ap_name"]) is not None: - temp_dict[self.keymap["ap_name"]] = ap_config.get(self.keymap["ap_name"]) + ap_name = ap_config.get(self.keymap["ap_name"]) + mac_address = ap_config.get(self.keymap["mac_address"]) + if ap_name is not None: + temp_dict[self.keymap["ap_name"]] = ap_name temp_dict["apNameNew"] = ap_config["apNameNew"] - temp_dict[self.keymap["mac_address"]] = ap_config[self.keymap["mac_address"]] + temp_dict[self.keymap["mac_address"]] = mac_address + self.log("Populated temp_dict with AP name: {0}, MAC address: {1}".format(ap_name, + mac_address), "DEBUG") del ap_config[self.keymap["ap_name"]] del ap_config["apNameNew"] - elif ap_config.get(self.keymap["mac_address"]) is not None: - temp_dict[self.keymap["mac_address"]] = ap_config.get(self.keymap["mac_address"]) + elif mac_address is not None: + temp_dict[self.keymap["mac_address"]] = mac_address + self.log("Populated temp_dict with MAC address: {0}".format(mac_address), "DEBUG") + else: + self.log("No AP name or MAC address found in ap_config.", "WARNING") ap_config["apList"].append(temp_dict) + self.log("Added temp_dict to apList. Current apList: {0}". + format(self.pprint(ap_config["apList"])), "INFO") else: ap_config["apList"] = ap_config.get("ap_list") + self.log("Bulk update detected. Setting apList from ap_list. Current apList: {0}". 
+ format(self.pprint(ap_config["apList"])), "INFO") del ap_config["ap_list"] + self.log("Removed ap_list from ap_config.", "DEBUG") if ap_config.get(self.keymap["ap_name"]) and ap_config.get("apNameNew"): del ap_config[self.keymap["ap_name"]] del ap_config["apNameNew"] + self.log("Removed old AP name and new AP name from ap_config.", "DEBUG") + else: + self.log("No AP name or new AP name to remove from ap_config.", "DEBUG") if ap_config.get(self.keymap["location"]) is not None: ap_config["configureLocation"] = True @@ -3726,7 +3806,6 @@ def reboot_access_point(self, ap_list): return self self.result['changed'] = False - self.status = "failed" self.msg = "Unable to get success response, hence APs are not rebooted" self.log(self.msg, "ERROR") self.log("Reboot Task Details: {0} .".format(self.pprint( @@ -3738,21 +3817,19 @@ def reboot_access_point(self, ap_list): "reboot_api_response": reboot_status["apList"] }, "ap_reboot_status": self.msg} - self.module.fail_json(msg=self.msg, response=responses) + self.set_operation_result("failed", self.result['changed'], + self.msg, "ERROR", responses).check_return_status() time.sleep(resync_retry_interval) resync_retry_count = resync_retry_count - 1 else: self.msg = "Failed to receive a valid response from AP reboot API." self.log(self.msg, "ERROR") - self.status = "failed" - self.module.fail_json(msg=self.msg) + self.set_operation_result("failed", False, self.msg, "ERROR").check_return_status() except Exception as e: error_msg = 'An error occurred during access point reboot: {0}'.format(str(e)) self.log(error_msg, "ERROR") - self.msg = error_msg - self.status = "failed" - self.module.fail_json(msg=self.msg) + self.set_operation_result("failed", False, error_msg, "ERROR").check_return_status() def access_point_reboot_status(self, task_id): """ @@ -3777,13 +3854,11 @@ def access_point_reboot_status(self, task_id): return response[0] error_msg = "Invalid response format or missing data in AP reboot status." self.log(error_msg, "ERROR") - self.module.fail_json(msg=error_msg) + self.set_operation_result("failed", False, error_msg, "ERROR").check_return_status() except Exception as e: error_msg = 'An error occurred during access point reboot status: {0}'.format(str(e)) self.log(error_msg, "ERROR") - self.msg = error_msg - self.status = "failed" - self.module.fail_json(msg=error_msg) + self.set_operation_result("failed", False, error_msg, "ERROR").check_return_status() def reset_access_point(self, ap_list): """ @@ -3809,15 +3884,14 @@ def reset_access_point(self, ap_list): if not (response or isinstance(response, dict)): self.msg = "Failed to receive a valid response from 'factory_reset_access_points' API." self.log(self.msg, "ERROR") - self.status = "failed" - self.module.fail_json(msg=self.msg) + self.set_operation_result("failed", False, self.msg, "ERROR").check_return_status() task_id = response.get("response", {}).get("taskId") if not task_id: self.msg = "Failed to retrieve task id from 'factory_reset_access_points' API response." 
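# --- Illustrative sketch, not taken from this patch ------------------------
# The reboot, reset and bulk-update paths in this module all repeat the same
# polling loop: read dnac_api_task_timeout / dnac_task_poll_interval, call
# get_tasks_by_id() until endTime is set, then either record success or fail
# through set_operation_result(). A consolidated version of that loop might
# look like the following (hypothetical helper; it assumes the module-level
# "import time" that the existing time.sleep() calls already rely on):
def wait_for_task(self, task_id, success_msg, failure_msg):
    retries = int(self.payload.get("dnac_api_task_timeout"))
    interval = int(self.payload.get("dnac_task_poll_interval"))
    while retries > 0:
        task = self.get_tasks_by_id(task_id)
        self.log("Task Details: {0} .".format(self.pprint(task)), "INFO")
        if task.get("endTime") is not None:
            if task.get("status") == "SUCCESS":
                self.result["changed"] = True
                self.log(success_msg, "INFO")
                return task
            failure_details = self.get_task_details_by_id(task_id)
            self.set_operation_result("failed", self.result.get("changed", False),
                                      failure_msg, "ERROR",
                                      failure_details).check_return_status()
        time.sleep(interval)
        retries -= 1
    self.set_operation_result("failed", False, failure_msg, "ERROR").check_return_status()
# ---------------------------------------------------------------------------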
self.log(self.msg, "ERROR") - self.status = "failed" - self.module.fail_json(msg=self.msg) + self.set_operation_result("failed", False, self.msg, "ERROR").check_return_status() + resync_retry_count = int(self.payload.get("dnac_api_task_timeout")) resync_retry_interval = int(self.payload.get("dnac_task_poll_interval")) @@ -3843,29 +3917,27 @@ def reset_access_point(self, ap_list): .format(ap_list, self.pprint(task_details_response)), "INFO") return self - self.result['changed'] = False - self.status = "failed" self.msg = "Unable to get success response, hence APs are not resetted" self.log(self.msg, "ERROR") self.log("Reset Task Details: {0} .".format(self.pprint( task_details_response)), "ERROR") reset_status = self.access_point_reset_status(task_id) + self.log("Reset Error Details: {0} .".format(self.pprint(reset_status)), "ERROR") responses["accesspoints_updates"] = { "ap_reset_task_details": { "status": task_details_response.get("status"), "reset_api_response": reset_status.get("apResponseInfoList") }, "ap_reset_status": self.msg} - self.module.fail_json(msg=self.msg, response=responses) + self.set_operation_result("failed", False, self.msg, "ERROR", responses).check_return_status() + time.sleep(resync_retry_interval) resync_retry_count = resync_retry_count - 1 except Exception as e: error_msg = 'An error occurred during access point reset: {0}'.format(str(e)) self.log(error_msg, "ERROR") - self.msg = error_msg - self.status = "failed" - self.module.fail_json(msg=self.msg) + self.set_operation_result("failed", False, error_msg, "ERROR").check_return_status() def access_point_reset_status(self, task_id): """ @@ -3890,13 +3962,12 @@ def access_point_reset_status(self, task_id): error_msg = "Invalid response format or missing data in AP reset status." self.log(error_msg, "ERROR") - self.module.fail_json(msg=error_msg) + self.set_operation_result("failed", False, error_msg, "ERROR").check_return_status() + except Exception as e: error_msg = 'An error occurred during access point reset status: {0}'.format(str(e)) self.log(error_msg, "ERROR") - self.msg = error_msg - self.status = "failed" - self.module.fail_json(msg=error_msg) + self.set_operation_result("failed", False, error_msg, "ERROR").check_return_status() def reboot_factory_reset_function(self, ap_list, reboot_or_reset): """ @@ -3914,7 +3985,7 @@ def reboot_factory_reset_function(self, ap_list, reboot_or_reset): if ap_indentity and len(ap_list.get(ap_indentity)) > 100: error_msg = "Maximum allowed AP list 100 but passed {0}".format(str(len(ap_list.get(ap_indentity)))) self.log(error_msg, "ERROR") - self.module.fail_json(msg=error_msg) + self.set_operation_result("failed", False, error_msg, "ERROR").check_return_status() if ap_indentity and ap_indentity in self.keymap and len(ap_list.get(ap_indentity)) > 0: eth_mac_list = [] @@ -3951,7 +4022,10 @@ def bulk_ap_update(self, bulk_config): ap_update_list = [] common_config = {} ap_output_list = [] + if ap_exist and len(ap_details) > 0: + self.log("Access points exist. 
Total count: {0}".format(str(len(ap_details))), "INFO") + for each_ap in ap_details: ap_config_exists, ap_configuration = self.get_accesspoint_config(each_ap["ap_ethernet_mac_address"]) self.log("Access point configuration exists: {0}, Current configuration: {1}" @@ -3962,11 +4036,18 @@ def bulk_ap_update(self, bulk_config): if (each_ap["mac_address"] == ap.get('mac_address') or each_ap["hostname"] == ap.get('hostname') or each_ap["management_ip_address"] == ap.get('management_ip_address'))] - self.want["ap_name"] = ap_name[0] - ap_output_list.append(ap_name[0]) + if ap_name: + self.want["ap_name"] = ap_name[0] + ap_output_list.append(ap_name[0]) + self.log("Identified AP name: {0}".format(ap_name[0]), "INFO") + else: + self.log("No matching AP name found for MAC: {0}".format(each_ap["mac_address"]), "WARNING") + self.log("Access point WANT configuration exists: {0}, Current configuration: {1}" .format(ap_config_exists, self.pprint(self.want)), "INFO") consolidated_config = self.config_diff(ap_configuration) + self.log("Consolidated configuration for AP {0}: {1}".format(each_ap["mac_address"], + self.pprint(consolidated_config)), "DEBUG") temp_dict = {} if consolidated_config.get(self.keymap["ap_name"]) is not None: @@ -3976,10 +4057,12 @@ def bulk_ap_update(self, bulk_config): elif consolidated_config.get(self.keymap["mac_address"]) is not None: temp_dict[self.keymap["mac_address"]] = consolidated_config.get(self.keymap["mac_address"]) ap_update_list.append(temp_dict) + self.log("Temp dict for AP {0}: {1}".format(each_ap["mac_address"], self.pprint(temp_dict)), "DEBUG") common_config.update(consolidated_config) common_config["bulk_update"] = True common_config["ap_list"] = ap_update_list + self.log("Common configuration for bulk update: {0}".format(self.pprint(common_config)), "INFO") task_response = self.update_ap_configuration(common_config) self.log("Access Point update response: {0} .".format(task_response), "INFO") @@ -3987,6 +4070,8 @@ def bulk_ap_update(self, bulk_config): if task_response and isinstance(task_response, dict): resync_retry_count = self.payload.get("dnac_api_task_timeout") resync_retry_interval = self.payload.get("dnac_task_poll_interval") + self.log("Starting task polling with timeout: {0} and interval: {1}". 
+ format(str(resync_retry_count), str(resync_retry_interval)), "INFO") while resync_retry_count: task_details_response = self.get_tasks_by_id( task_response["response"]["taskId"]) @@ -3995,14 +4080,13 @@ def bulk_ap_update(self, bulk_config): if task_details_response.get("endTime") is not None: if task_details_response.get("status") == "FAILURE": self.result["changed"] = True if self.result["changed"] is True else False - self.status = "failed" self.msg = "Unable to get success response, hence AP config not updated" self.log(self.msg, "ERROR") self.log("Task Details: {0} .".format(self.pprint(task_details_response)), "ERROR") - responses["accesspoints_updates"] = { - "ap_update_config_task_details": self.get_task_details_by_id(task_response["response"]["taskId"]), - "ap_config_update_status": self.msg} - self.module.fail_json(msg=self.msg, response=responses) + failure_response = self.get_task_details_by_id(task_response["response"]["taskId"]) + self.log("Failure Details: {0} .".format(self.pprint(failure_response)), "ERROR") + self.set_operation_result("failed", self.result["changed"], + self.msg, "ERROR", failure_response).check_return_status() else: self.result["changed"] = True self.result["ap_update_status"] = True @@ -4017,6 +4101,8 @@ def bulk_ap_update(self, bulk_config): self.result["ap_update_msg"] = self.msg break + self.log("Polling task status, waiting for {0} seconds before the next check...". + format(str(resync_retry_interval)), "DEBUG") time.sleep(resync_retry_interval) resync_retry_count = resync_retry_count - 1 From 6f7495e991c4c5fdf7f453a1afe2fadde67348c4 Mon Sep 17 00:00:00 2001 From: md-rafeek Date: Tue, 5 Nov 2024 23:16:25 +0530 Subject: [PATCH 24/83] Sanity error fixed --- plugins/modules/accesspoint_workflow_manager.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/plugins/modules/accesspoint_workflow_manager.py b/plugins/modules/accesspoint_workflow_manager.py index 67f5919fb..11b24c9d9 100644 --- a/plugins/modules/accesspoint_workflow_manager.py +++ b/plugins/modules/accesspoint_workflow_manager.py @@ -3159,7 +3159,7 @@ def provision_device(self): provision_details = execution_details break if execution_details.get("bapiError"): - msg=execution_details.get("bapiError") + msg = execution_details.get("bapiError") self.set_operation_result("failed", False, msg, "ERROR", execution_details).check_return_status() break @@ -4086,7 +4086,7 @@ def bulk_ap_update(self, bulk_config): failure_response = self.get_task_details_by_id(task_response["response"]["taskId"]) self.log("Failure Details: {0} .".format(self.pprint(failure_response)), "ERROR") self.set_operation_result("failed", self.result["changed"], - self.msg, "ERROR", failure_response).check_return_status() + self.msg, "ERROR", failure_response).check_return_status() else: self.result["changed"] = True self.result["ap_update_status"] = True From d27fcc0d76abaa24509ab1be4c9b83ae5117136d Mon Sep 17 00:00:00 2001 From: Syed-khadeerahmed Date: Wed, 6 Nov 2024 14:21:34 +0530 Subject: [PATCH 25/83] code is in progress --- playbooks/swim_workflow_manager.yml | 19 ++- plugins/modules/swim_workflow_manager.py | 179 ++++++++++++++++------- 2 files changed, 137 insertions(+), 61 deletions(-) diff --git a/playbooks/swim_workflow_manager.yml b/playbooks/swim_workflow_manager.yml index 481024940..dcadb3793 100644 --- a/playbooks/swim_workflow_manager.yml +++ b/playbooks/swim_workflow_manager.yml @@ -22,9 +22,16 @@ dnac_api_task_timeout: 1000 dnac_task_poll_interval: 1 config: - - tagging_details: - 
image_name: cat9k_iosxe.17.15.01.SPA.bin - device_role: ("core","Distribution") - device_image_family_name: Cisco Catalyst 9300 Switch - # site_name: Global/Chennai/LTTS/FLOOR1 - tagging: True + - image_distribution_details: + site_name: Global/Chennai/LTTS/FLOOR1 + device_image_family_name: Cisco Catalyst 9300 Switch + device_role: CORE + device_family_name: Switches and Hubs + device_series_name: Cisco Catalyst 9300 Series Switches + + # - tagging_details: + # image_name: cat9k_iosxe.17.15.01.SPA.bin + # device_role: ["core"] + # device_image_family_name: Cisco Catalyst 9300 Switch + # # site_name: Global/Chennai/LTTS/FLOOR1 + # tagging: True diff --git a/plugins/modules/swim_workflow_manager.py b/plugins/modules/swim_workflow_manager.py index ffe2e5ca3..b618c6e76 100644 --- a/plugins/modules/swim_workflow_manager.py +++ b/plugins/modules/swim_workflow_manager.py @@ -192,6 +192,8 @@ CORE - This role typically represents high-capacity switches or routers that form the backbone of the network. They handle large volumes of traffic and provide connectivity between different parts of network, such as connecting distribution switches or providing interconnection between different network segments. + Only the end-state device role will be marked as golden. For example, if a device is already marked as golden for ACCESS, and it needs to be marked + as golden for DISTRIBUTION and CORE, the system will first un-tag the golden status for ACCESS before marking DISTRIBUTION and CORE as golden. type: str device_image_family_name: description: Device Image family name(Eg Cisco Catalyst 9300 Switch) @@ -462,6 +464,25 @@ site_name: Global/USA/San Francisco/BGL_18 tagging: True +- name: Tag the specified image as golden for multiple device roles and load it onto the device + cisco.dnac.swim_workflow_manager: + dnac_host: "{{dnac_host}}" + dnac_username: "{{dnac_username}}" + dnac_password: "{{dnac_password}}" + dnac_verify: "{{dnac_verify}}" + dnac_port: "{{dnac_port}}" + dnac_version: "{{dnac_version}}" + dnac_debug: "{{dnac_debug}}" + dnac_log_level: "{{dnac_log_level}}" + dnac_log: True + config: + - tagging_details: + image_name: cat9k_iosxe.17.12.01.SPA.bin + device_role: ("ACCESS","DISTRIBUTION") + device_image_family_name: Cisco Catalyst 9300 Switch + site_name: Global/USA/San Francisco/BGL_18 + tagging: True + - name: Un-tagged the given image as golden and load it on device cisco.dnac.swim_workflow_manager: dnac_host: "{{dnac_host}}" @@ -1144,6 +1165,10 @@ def get_have(self): have = {} distribution_details = self.want.get("distribution_details") site_name = distribution_details.get("site_name") + + family_name = distribution_details.get("device_image_family_name") + self.get_device_family_identifier(family_name) + if site_name: site_exists = False (site_exists, site_id) = self.site_exists(site_name) @@ -1151,7 +1176,8 @@ def get_have(self): if site_exists: have["site_id"] = site_id self.log("Site '{0}' exists and has the site ID: {1}".format(site_name, str(site_id)), "DEBUG") - + golden_tagged_images_id = self.get_golden_image_id(site_id) + self.log(golden_tagged_images_id) # check if image for distributon is available if distribution_details.get("image_name"): name = distribution_details.get("image_name").split("/")[-1] @@ -1161,9 +1187,9 @@ def get_have(self): elif self.have.get("imported_image_id"): have["distribution_image_id"] = self.have.get("imported_image_id") - else: - self.log("Image details required for distribution have not been provided", "ERROR") - self.module.fail_json(msg="Image 
details required for distribution have not been provided", response=[]) + # else: + # self.log("Image details required for distribution have not been provided", "ERROR") + # self.module.fail_json(msg="Image details required for distribution have not been provided", response=[]) device_params = { "hostname": distribution_details.get("device_hostname"), @@ -1503,6 +1529,55 @@ def get_diff_import(self): return self + def get_golden_image_id(self, site_id): + distribution_details = self.want.get("distribution_details") + distribution_device_role = distribution_details.get("device_role") + self.log(self.want.get("device_role")) + golden_tagged_image_ids = [] + final_golden_tagged_image = [] + self.log("Inside get_golden_image_id function", "DEBUG") + + image_response = self.dnac._exec( + family="software_image_management_swim", + function="get_software_image_details", + params={"isTaggedGolden": True}, + ) + + if not image_response or "response" not in image_response: + self.log("Empty or invalid response received from 'get_software_image_details'", "ERROR") + return golden_tagged_image_ids + + self.log("Received API response from 'get_software_image_details': {0}".format(image_response), "DEBUG") + + for item in image_response.get("response", []): + image_uuid = item.get("imageUuid") + if image_uuid: + golden_tagged_image_ids.append(image_uuid) + image_params = { + "image_id": image_uuid, + "site_id": site_id, + "device_family_identifier": self.have.get("device_family_identifier"), + "device_role": distribution_device_role + } + self.log(image_params) + + golden_tag_response = self.dnac._exec( + family="software_image_management_swim", + function="get_golden_tag_status_of_an_image", + op_modifies=True, + params=image_params + ) + self.log(golden_tag_response) + if golden_tag_response: + if golden_tag_response.get("response").get("taggedGolden") is True: + final_golden_tagged_image.append(image_uuid) + self.log(final_golden_tagged_image) + + self.log("Golden tagged image IDs: {0}".format(golden_tagged_image_ids), "DEBUG") + self.log("specific golden tagged image for the given parameter is {0}".format(final_golden_tagged_image)) + + return final_golden_tagged_image + def get_diff_tagging(self): """ Tag or untag a software image as golden based on provided tagging details. @@ -1513,7 +1588,7 @@ def get_diff_tagging(self): Description: This function tags or untags a software image as a golden image in Cisco Catalyst Center based on the provided tagging details. The tagging action is determined by the value of the 'tagging' attribute - in the 'tagging_details' dictionary.If 'tagging' is True, the image is tagged as golden, and if 'tagging' + in the 'tagging_details' dictionary. If 'tagging' is True, the image is tagged as golden, and if 'tagging' is False, the golden tag is removed. The function sends the appropriate request to Cisco Catalyst Center and updates the task details in the 'result' dictionary. If the operation is successful, 'changed' is set to True. 
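# --- Illustrative sketch, not taken from this patch ------------------------
# 'device_role' may now be given either as a single string or as a list of
# roles; the body of this function normalises it to a list and rejects
# anything outside the known set. That step, pulled out on its own, would
# look roughly like this (standalone illustration, not module code):
VALID_DEVICE_ROLES = ("core", "distribution", "access", "border router", "unknown", "all")

def normalize_device_roles(device_role):
    """Return validated, upper-cased roles, accepting either a str or a list."""
    roles = [device_role] if isinstance(device_role, str) else list(device_role or [])
    unknown = [role for role in roles if role.lower() not in VALID_DEVICE_ROLES]
    if unknown:
        raise ValueError("Unrecognized device role(s): {0}. Expected one of: {1}.".format(
            ", ".join(unknown), ", ".join(VALID_DEVICE_ROLES)))
    return [role.upper() for role in roles]

# Example: normalize_device_roles("core") -> ["CORE"]
#          normalize_device_roles(["access", "distribution"]) -> ["ACCESS", "DISTRIBUTION"]
# ---------------------------------------------------------------------------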
""" @@ -1521,11 +1596,28 @@ def get_diff_tagging(self): tagging_details = self.want.get("tagging_details") tag_image_golden = tagging_details.get("tagging") image_name = self.get_image_name_from_id(self.have.get("tagging_image_id")) - device_role=tagging_details.get("device_role", "ALL") + device_role = tagging_details.get("device_role", "ALL") + + if isinstance(device_role, str): + device_role = [device_role] + already_un_tagged_device_role = [] already_tagged_device_role = [] - for role in device_role.replace('(', '').replace(')', '').replace('"', '').split(','): + device_roles = ["core", "distribution", "access", "border router", "unknown", "all"] + + for role in device_role: + if role.lower() not in device_roles: + self.status = "failed" + self.msg = ( + "Validation Error: The specified device role '{0}' is not recognized. " + "Please ensure the role matches one of the known device roles: {1}." + ).format(role, ", ".join(device_roles)) + self.log(self.msg, "ERROR") + self.result['response'] = self.msg + self.check_return_status() + + for role in device_role: image_params = { "image_id": self.have.get("tagging_image_id"), "site_id": self.have.get("site_id"), @@ -1533,35 +1625,32 @@ def get_diff_tagging(self): "device_role": role.upper() } self.log(image_params) - + response = self.dnac._exec( family="software_image_management_swim", function="get_golden_tag_status_of_an_image", op_modifies=True, params=image_params ) - self.log(f"Received API response from 'get_golden_tag_status_of_an_image': {str(response)}", "DEBUG") + self.log("Received API response from 'get_golden_tag_status_of_an_image': {0}".format(str(response)), "DEBUG") api_response = response.get('response') if api_response: image_status = api_response.get('taggedGolden') - + if image_status == tag_image_golden: - msg = f"SWIM Image '{image_name}' already tagged as Golden image in Cisco Catalyst Center" + msg = "SWIM Image '{0}' already tagged as Golden image in Cisco Catalyst Center".format(image_name) self.log(msg, "INFO") already_tagged_device_role.append(role) - + if not image_status and image_status == tag_image_golden: - msg = f"SWIM Image '{image_name}' already un-tagged from Golden image in Cisco Catalyst Center" + msg = "SWIM Image '{0}' already un-tagged from Golden image in Cisco Catalyst Center".format(image_name) self.log(msg, "INFO") already_un_tagged_device_role.append(role) # Check if all roles are tagged as Golden - device_role=tagging_details.get("device_role", "ALL") - device_role_list = device_role.replace('(', '').replace(')', '').replace('"', '').split(',') - device_role = ', '.join(role.strip() for role in device_role_list) if tag_image_golden: - if len(already_tagged_device_role) == len(device_role.split(',')): + if len(already_tagged_device_role) == len(device_role): self.status = "success" self.result['changed'] = False self.msg = "SWIM Image '{0}' already tagged as Golden image in Cisco Catalyst Center for the roles - {1}.".format(image_name, device_role) @@ -1570,7 +1659,7 @@ def get_diff_tagging(self): self.log(self.msg, "INFO") return self else: - if len(already_un_tagged_device_role) == len(device_role.split(',')): + if len(already_un_tagged_device_role) == len(device_role): self.log("inside logic") self.status = "success" self.result['changed'] = False @@ -1579,8 +1668,6 @@ def get_diff_tagging(self): self.result['response'] = self.msg self.log(self.msg, "INFO") return self - -#-------------------------------------------------------------------------------------------------------------# if 
tag_image_golden: image_param = dict( @@ -1602,24 +1689,14 @@ def get_diff_tagging(self): self.status = "failed" self.msg = "Did not get the response of API so cannot check the Golden tagging status of image - {0}".format(image_name) - device_role=tagging_details.get("device_role", "ALL") - - for role in device_role.replace('(', '').replace(')', '').replace('"', '').split(','): - image_params = dict( - imageId=self.have.get("tagging_image_id"), - siteId=self.have.get("site_id"), - deviceFamilyIdentifier=self.have.get("device_family_identifier"), - deviceRole=role.upper() - ) - self.log(f"Parameters for tagging the image as golden for role {role}: {str(image_params)}", "INFO") - + for role in device_role: image_params = dict( imageId=self.have.get("tagging_image_id"), siteId=self.have.get("site_id"), deviceFamilyIdentifier=self.have.get("device_family_identifier"), deviceRole=role.upper() ) - self.log("Parameters for tagging the image as golden: {0}".format(str(image_params)), "INFO") + self.log("Parameters for tagging the image as golden for role {0}: {1}".format(role, str(image_params)), "INFO") response = self.dnac._exec( family="software_image_management_swim", @@ -1629,16 +1706,16 @@ def get_diff_tagging(self): ) self.log("Received API response from 'tag_as_golden_image': {0}".format(str(response)), "DEBUG") - else: - for role in device_role.replace('(', '').replace(')', '').replace('"', '').split(','): - # Create parameters for the API call + else: + for role in device_role: + self.log(role) image_params = { "image_id": self.have.get("tagging_image_id"), "site_id": self.have.get("site_id"), "device_family_identifier": self.have.get("device_family_identifier"), "device_role": role.upper() } - self.log(f"Parameters for un-tagging the image as golden for role {role}: {str(image_params)}", "INFO") + self.log("Parameters for un-tagging the image as golden for role {0}: {1}".format(role, str(image_params)), "INFO") response = self.dnac._exec( family="software_image_management_swim", @@ -1646,8 +1723,7 @@ def get_diff_tagging(self): op_modifies=True, params=image_params ) - self.log(f"Received API response from 'remove_golden_tag_for_image': {str(response)}", "DEBUG") - + self.log("Received API response from 'remove_golden_tag_for_image': {0}".format(str(response)), "DEBUG") if not response: self.status = "failed" @@ -1659,10 +1735,8 @@ def get_diff_tagging(self): task_details = {} task_id = response.get("response").get("taskId") - device_family=tagging_details.get("device_image_family_name") - device_role=tagging_details.get("device_role", "ALL") - device_role_list = device_role.replace('(', '').replace(')', '').replace('"', '').split(',') - device_role = ', '.join(role.strip() for role in device_role_list) + device_family = tagging_details.get("device_image_family_name") + device_role = tagging_details.get("device_role", "ALL") site_name = tagging_details.get("site_name") if not site_name: @@ -1699,19 +1773,11 @@ def get_diff_tagging(self): self.msg = failure_reason self.result['msg'] = failure_reason self.log(self.msg, "ERROR") - self.result['response'] = self.msg - break - else: - error_message = task_details.get("failureReason", "Error: while tagging/un-tagging the golden swim image.") - self.status = "failed" - self.msg = error_message - self.result['msg'] = error_message - self.log(self.msg, "ERROR") - self.result['response'] = self.msg break return self + def get_device_ip_from_id(self, device_id): """ Retrieve the management IP address of a device from Cisco Catalyst Center using 
its ID. @@ -2175,7 +2241,10 @@ def verify_diff_tagged(self): image_name = self.get_image_name_from_id(image_id) device_role = tagging_details.get("device_role", "ALL") - for role in device_role.replace('(', '').replace(')', '').replace('"', '').split(','): + if isinstance(device_role, str): + device_role = [device_role] + + for role in device_role: image_params = dict( image_id=self.have.get("tagging_image_id"), site_id=self.have.get("site_id"), @@ -2370,9 +2439,9 @@ def main(): ccc_swims.get_want(config).check_return_status() ccc_swims.get_diff_import().check_return_status() ccc_swims.get_have().check_return_status() - ccc_swims.get_diff_state_apply[state](config).check_return_status() - if config_verify: - ccc_swims.verify_diff_state_apply[state](config).check_return_status() + # ccc_swims.get_diff_state_apply[state](config).check_return_status() + # if config_verify: + # ccc_swims.verify_diff_state_apply[state](config).check_return_status() module.exit_json(**ccc_swims.result) From e957deb5dd3fad11a988391094e08143151129e7 Mon Sep 17 00:00:00 2001 From: Madhan Date: Wed, 6 Nov 2024 21:51:26 +0530 Subject: [PATCH 26/83] Changes in workflow manager modules --- changelogs/changelog.yaml | 15 +++++++++++++++ galaxy.yml | 2 +- 2 files changed, 16 insertions(+), 1 deletion(-) diff --git a/changelogs/changelog.yaml b/changelogs/changelog.yaml index 8d3f5fb60..82b98e8d6 100644 --- a/changelogs/changelog.yaml +++ b/changelogs/changelog.yaml @@ -1064,3 +1064,18 @@ releases: - Changes in dnac.py - inventory_workflow_manager.py - added attribute hostnames, serial_numbers and mac_addresses - inventory_workflow_manager.py - Removed attribute hostname_list, serial_number_list and mac_address_list + 6.23.0: + release_date: "2024-11-06" + changes: + release_summary: Code changes in workflow manager modules. + minor_changes: + - Changes in circleci to run test cases in integration branch + - Added support for bulk operations on multiple access points in accesspoint_workflow_manager + - Bug fixes in inventory_workflow_manager + - Enhancements in sda_fabric_devices_workflow_manager.py to support route distribution protocol + - Enhancements in sda_fabric_sites_zones_workflow_manager.py + - Bug fixes in sda_fabric_virtual_networks_workflow_manager.py + - Changes in site_workflow_manager + - accesspoint_workflow_manager - added attribute bulk_update_aps + - sda_fabric_devices_workflow_manager.py - added attribute route_distribution_protocol + - sda_fabric_sites_zones_workflow_manager.py - added attribute site_name_hierarchy and removed attribute site_name diff --git a/galaxy.yml b/galaxy.yml index eee72fc97..f719fc0d8 100644 --- a/galaxy.yml +++ b/galaxy.yml @@ -1,7 +1,7 @@ --- namespace: cisco name: dnac -version: 6.22.0 +version: 6.23.0 readme: README.md authors: - Rafael Campos From 489d11b469963d800df92a7382329b6c6aae59e7 Mon Sep 17 00:00:00 2001 From: Syed-khadeerahmed Date: Thu, 7 Nov 2024 15:49:02 +0530 Subject: [PATCH 27/83] bug fixed - dev testing done QA testing starting --- plugins/modules/inventory_workflow_manager.py | 21 +++++++++++++++---- 1 file changed, 17 insertions(+), 4 deletions(-) diff --git a/plugins/modules/inventory_workflow_manager.py b/plugins/modules/inventory_workflow_manager.py index 1c296efc3..50680ade6 100644 --- a/plugins/modules/inventory_workflow_manager.py +++ b/plugins/modules/inventory_workflow_manager.py @@ -1799,7 +1799,7 @@ def provision_wired_device_v1(self, device_ip, site_name, device_type): site_name (str): The name of the site where the device will be provisioned. 
device_type (str): The type of device being provisioned. Returns: - bool: True if provisioning is successful, False otherwise. + self (object): An instance of the class after the provision operation is performed. Description: This method provisions a device with the specified IP address, site name, and device type for software versions 2.3.5.6 or earlier. @@ -1810,14 +1810,27 @@ def provision_wired_device_v1(self, device_ip, site_name, device_type): try: response = self.dnac._exec(family="sda", function='provision_wired_device', op_modifies=True, params=provision_params) self.log("Received API response from 'provision_wired_device': {0}".format(response), "DEBUG") + if response: - validation_string = "successfully" - self.check_task_response_status(response, validation_string, 'provision_wired_device') - self.deleted_devices.append(device_ip) + exec_id = response.get("executionId") + response = self.get_execution_details(exec_id) + while True: + if response.get("status") == "SUCCESS": + self.log("Device: {0} successfully provisioned to the site {1}".format(device_ip, site_name), "INFO") + self.provision_count += 1 + self.provisioned_device.append(device_ip) + break + elif response.get("status") == "FAILURE": + self.log("Failed to provision device: {0}".format(device_ip), "ERROR") + raise Exception + else: + self.log("Provisioning in progress for device: {0}".format(device_ip), "DEBUG") except Exception as e: self.handle_provisioning_exception(device_ip, e, device_type) + return self + def provision_wired_device_v2(self, device_ip, site_name): """ Provisions a device for versions > 2.3.5.6. From a4c4752609c4ac88697c55290e5316567c88589d Mon Sep 17 00:00:00 2001 From: MUTHU-RAKESH-27 <19cs127@psgitech.ac.in> Date: Fri, 8 Nov 2024 08:18:11 +0530 Subject: [PATCH 28/83] Addressed the review comments --- plugins/modules/diff.txt | 172 ++++++++++++++++++ .../sda_fabric_devices_workflow_manager.py | 17 +- 2 files changed, 180 insertions(+), 9 deletions(-) create mode 100644 plugins/modules/diff.txt diff --git a/plugins/modules/diff.txt b/plugins/modules/diff.txt new file mode 100644 index 000000000..ef717f36b --- /dev/null +++ b/plugins/modules/diff.txt @@ -0,0 +1,172 @@ +diff --git a/plugins/modules/sda_fabric_devices_workflow_manager.py b/plugins/modules/sda_fabric_devices_workflow_manager.py +index 3e341630..b6d80b33 100644 +--- a/plugins/modules/sda_fabric_devices_workflow_manager.py ++++ b/plugins/modules/sda_fabric_devices_workflow_manager.py +@@ -130,14 +130,14 @@ options: + is_default_exit: + description: + - Indicates whether this Border Node serves as the default gateway for traffic exiting the virtual network. +- - The `is_default_exit` can be updated. ++ - The `is_default_exit` cannot be updated. + type: bool + default: true + import_external_routes: + description: + - Determines whether routes from external networks are imported into the fabric. + - Enhances security by limiting route usage to internal routes. +- - The 'import_external_routes' can be updated. ++ - The 'import_external_routes' cannot be updated. + type: bool + default: true + border_priority: +@@ -2442,7 +2442,7 @@ class FabricDevices(DnacBase): + network_id (str): The Id of the network device. + device_details (dict): Playbook details containing fabric devices details along + with the Border Settings, L2 Handoff, L3 SDA Handoff, L3 IP Handoff information. +- config_index (int) - Pointer to the device_config elements in the playbook. ++ config_index (int): Pointer to the device_config elements in the playbook. 
+ Returns: + device_info (dict): The processed device details from the user playbook. + Description: +@@ -2523,8 +2523,10 @@ class FabricDevices(DnacBase): + have_border_settings = None + + # Get the border settings details from the Cisco Catalyst Center, if available +- if not borders_settings: ++ if have_device_details: + have_border_settings = have_device_details.get("borderDeviceSettings") ++ ++ if not borders_settings: + if not have_border_settings: + self.msg = ( + "The parameter 'border_settings' is mandatory when the 'device_roles' has 'BORDER_NODE' " +@@ -2536,7 +2538,10 @@ class FabricDevices(DnacBase): + device_info.update({ + "borderDeviceSettings": have_border_settings + }) +- self.log("Border settings retrieved from existing data: {}".format(have_border_settings), "DEBUG") ++ self.log( ++ "Border settings retrieved from existing data: {have_border_settings}" ++ .format(have_border_settings=have_border_settings), "DEBUG" ++ ) + return device_info + + self.log("Processing user-provided border settings", "DEBUG") +@@ -2581,8 +2586,8 @@ class FabricDevices(DnacBase): + if "LAYER_3" in border_types: + if not (layer3_settings or have_layer3_settings): + self.msg = ( +- "The parameter 'border_settings' is mandatory when the 'device_roles' has 'BORDER_NODE' " +- "for the device {ip}.".format(ip=device_ip) ++ "The parameter 'layer3_settings' is mandatory under 'borders_settings' when the " ++ "'device_roles' has 'BORDER_NODE' for the device {ip}.".format(ip=device_ip) + ) + self.status = "failed" + return self.check_return_status() +@@ -2599,27 +2604,55 @@ class FabricDevices(DnacBase): + ) + self.status = "failed" + return self.check_return_status() ++ else: ++ if have_layer3_settings and (str(local_autonomous_system_number) != str(have_layer3_settings.get("localAutonomousSystemNumber"))): ++ self.msg = ( ++ "The parameter 'local_autonomous_system_number' under 'layer3_settings' should not be " ++ "updated for the device with IP '{ip}'.".format(ip=device_ip) ++ ) ++ self.status = "failed" ++ return self.check_return_status() + + self.validate_local_autonomous_system_number(local_autonomous_system_number, device_ip) + self.log( + "Successfully validated 'local_autonomous_system_number': {asn_number}" + .format(asn_number=local_autonomous_system_number), "DEBUG" + ) +- is_default_exit = layer3_settings.get("layer3_settings") +- if not is_default_exit: ++ is_default_exit = layer3_settings.get("is_default_exit") ++ if is_default_exit is None: + if have_layer3_settings: + have_is_default_exit = have_layer3_settings.get("isDefaultExit") + is_default_exit = have_is_default_exit + else: + is_default_exit = True ++ else: ++ if have_layer3_settings: ++ have_is_default_exit = have_layer3_settings.get("importExternalRoutes") ++ if is_default_exit != have_is_default_exit: ++ self.msg = ( ++ "The parameter 'is_default_exit' under 'layer3_settings' should not be " ++ "updated for the device with IP '{ip}'.".format(ip=device_ip) ++ ) ++ self.status = "failed" ++ return self.check_return_status() + + import_external_routes = layer3_settings.get("import_external_routes") +- if not import_external_routes: ++ if import_external_routes is None: + if have_layer3_settings: + have_import_external_routes = have_layer3_settings.get("importExternalRoutes") + import_external_routes = have_import_external_routes + else: + import_external_routes = True ++ else: ++ if have_layer3_settings: ++ have_import_external_routes = have_layer3_settings.get("importExternalRoutes") ++ if import_external_routes != 
have_import_external_routes: ++ self.msg = ( ++ "The parameter 'import_external_routes' under 'layer3_settings' should not be " ++ "updated for the device with IP '{ip}'.".format(ip=device_ip) ++ ) ++ self.status = "failed" ++ return self.check_return_status() + + border_priority = layer3_settings.get("border_priority") + # Default value of border priority is 10 +@@ -3107,7 +3140,7 @@ class FabricDevices(DnacBase): + ) + return sda_l3_handoff_info + +- def validate_layer3_handoff_ip_transit(self, item, device_ip, is_ip_l3_handoff_exists, have_ip_l3_handoff): ++ def validate_layer3_handoff_ip_transit(self, item, device_ip, is_ip_l3_handoff_exists, have_ip_l3_handoff, l3_ip_handoff_index): + """ + Validate Layer 3 handoff IP transit parameters. + +@@ -3116,6 +3149,7 @@ class FabricDevices(DnacBase): + device_ip (str): The device IP address. + is_ip_l3_handoff_exists (int): The existence of the L3 handoff item. + have_ip_l3_handoff (dict): Existing L3 handoff details for the device. ++ l3_ip_handoff_index (int): Index for the current item in the 'have_ip_l3_handoff'. + + Returns: + tuple: A tuple containing transit_id, interface_name, virtual_network_name, vlan_id, tcp_mss_adjustment +@@ -3217,9 +3251,9 @@ class FabricDevices(DnacBase): + self.log(self.msg, "ERROR") + return (None, None, None, None, None, False) + elif virtual_network_name and (not vlan_id): +- vlan_id = have_ip_l3_handoff.get("vlanId") ++ vlan_id = have_ip_l3_handoff[l3_ip_handoff_index].get("vlanId") + elif vlan_id and (not virtual_network_name): +- virtual_network_name = have_ip_l3_handoff.get("virtualNetworkName") ++ virtual_network_name = have_ip_l3_handoff[l3_ip_handoff_index].get("virtualNetworkName") + else: + if not (virtual_network_name and vlan_id): + self.msg = ( +@@ -3234,7 +3268,7 @@ class FabricDevices(DnacBase): + tcp_mss_adjustment = item.get("tcp_mss_adjustment") + if not tcp_mss_adjustment: + if is_ip_l3_handoff_exists: +- have_tcp_mss_adjustment = have_ip_l3_handoff.get("tcpMssAdjustment") ++ have_tcp_mss_adjustment = have_ip_l3_handoff[l3_ip_handoff_index].get("tcpMssAdjustment") + if have_tcp_mss_adjustment: + tcp_mss_adjustment = have_tcp_mss_adjustment + else: +@@ -3322,7 +3356,8 @@ class FabricDevices(DnacBase): + + (transit_id, interface_name, virtual_network_name, vlan_id, tcp_mss_adjustment, is_valid) = \ + self.validate_layer3_handoff_ip_transit( +- item, device_details.get("device_ip"), is_ip_l3_handoff_exists, have_ip_l3_handoff ++ item, device_details.get("device_ip"), is_ip_l3_handoff_exists, ++ have_ip_l3_handoff, l3_ip_handoff_index + ) + + if not is_valid: diff --git a/plugins/modules/sda_fabric_devices_workflow_manager.py b/plugins/modules/sda_fabric_devices_workflow_manager.py index b6d80b330..ba0a7085a 100644 --- a/plugins/modules/sda_fabric_devices_workflow_manager.py +++ b/plugins/modules/sda_fabric_devices_workflow_manager.py @@ -2568,8 +2568,8 @@ def get_device_params(self, fabric_id, network_id, device_details, config_index) if not border_types: self.msg = ( - "Either L3 or L2 Handoff should be set. 
Please provide the 'layer3_settings' or " - "'layer2_handoff' for the device with IP '{ip}'".format(ip=device_ip) + "The 'layer3_settings' parameter is required under 'borders_settings' when " + "'device_roles' includes 'BORDER_NODE' for device {ip}.".format(ip=device_ip) ) self.status = "failed" return self.check_return_status() @@ -2605,10 +2605,11 @@ def get_device_params(self, fabric_id, network_id, device_details, config_index) self.status = "failed" return self.check_return_status() else: - if have_layer3_settings and (str(local_autonomous_system_number) != str(have_layer3_settings.get("localAutonomousSystemNumber"))): + existing_as_number = have_layer3_settings.get("localAutonomousSystemNumber") if have_layer3_settings else None + if existing_as_number and str(local_autonomous_system_number) != str(existing_as_number): self.msg = ( - "The parameter 'local_autonomous_system_number' under 'layer3_settings' should not be " - "updated for the device with IP '{ip}'.".format(ip=device_ip) + "The parameter 'local_autonomous_system_number' in 'layer3_settings' must not be updated " + "for the device with IP '{ip}'.".format(ip=device_ip) ) self.status = "failed" return self.check_return_status() @@ -2621,14 +2622,12 @@ def get_device_params(self, fabric_id, network_id, device_details, config_index) is_default_exit = layer3_settings.get("is_default_exit") if is_default_exit is None: if have_layer3_settings: - have_is_default_exit = have_layer3_settings.get("isDefaultExit") - is_default_exit = have_is_default_exit + is_default_exit = have_layer3_settings.get("isDefaultExit", True) else: is_default_exit = True else: if have_layer3_settings: - have_is_default_exit = have_layer3_settings.get("importExternalRoutes") - if is_default_exit != have_is_default_exit: + if is_default_exit != have_layer3_settings.get("importExternalRoutes"): self.msg = ( "The parameter 'is_default_exit' under 'layer3_settings' should not be " "updated for the device with IP '{ip}'.".format(ip=device_ip) From e454d275c1f208410bef908fb44f200133993226 Mon Sep 17 00:00:00 2001 From: MUTHU-RAKESH-27 <19cs127@psgitech.ac.in> Date: Fri, 8 Nov 2024 08:27:01 +0530 Subject: [PATCH 29/83] Removed the diff.txt file --- plugins/modules/diff.txt | 172 --------------------------------------- 1 file changed, 172 deletions(-) delete mode 100644 plugins/modules/diff.txt diff --git a/plugins/modules/diff.txt b/plugins/modules/diff.txt deleted file mode 100644 index ef717f36b..000000000 --- a/plugins/modules/diff.txt +++ /dev/null @@ -1,172 +0,0 @@ -diff --git a/plugins/modules/sda_fabric_devices_workflow_manager.py b/plugins/modules/sda_fabric_devices_workflow_manager.py -index 3e341630..b6d80b33 100644 ---- a/plugins/modules/sda_fabric_devices_workflow_manager.py -+++ b/plugins/modules/sda_fabric_devices_workflow_manager.py -@@ -130,14 +130,14 @@ options: - is_default_exit: - description: - - Indicates whether this Border Node serves as the default gateway for traffic exiting the virtual network. -- - The `is_default_exit` can be updated. -+ - The `is_default_exit` cannot be updated. - type: bool - default: true - import_external_routes: - description: - - Determines whether routes from external networks are imported into the fabric. - - Enhances security by limiting route usage to internal routes. -- - The 'import_external_routes' can be updated. -+ - The 'import_external_routes' cannot be updated. 
- type: bool - default: true - border_priority: -@@ -2442,7 +2442,7 @@ class FabricDevices(DnacBase): - network_id (str): The Id of the network device. - device_details (dict): Playbook details containing fabric devices details along - with the Border Settings, L2 Handoff, L3 SDA Handoff, L3 IP Handoff information. -- config_index (int) - Pointer to the device_config elements in the playbook. -+ config_index (int): Pointer to the device_config elements in the playbook. - Returns: - device_info (dict): The processed device details from the user playbook. - Description: -@@ -2523,8 +2523,10 @@ class FabricDevices(DnacBase): - have_border_settings = None - - # Get the border settings details from the Cisco Catalyst Center, if available -- if not borders_settings: -+ if have_device_details: - have_border_settings = have_device_details.get("borderDeviceSettings") -+ -+ if not borders_settings: - if not have_border_settings: - self.msg = ( - "The parameter 'border_settings' is mandatory when the 'device_roles' has 'BORDER_NODE' " -@@ -2536,7 +2538,10 @@ class FabricDevices(DnacBase): - device_info.update({ - "borderDeviceSettings": have_border_settings - }) -- self.log("Border settings retrieved from existing data: {}".format(have_border_settings), "DEBUG") -+ self.log( -+ "Border settings retrieved from existing data: {have_border_settings}" -+ .format(have_border_settings=have_border_settings), "DEBUG" -+ ) - return device_info - - self.log("Processing user-provided border settings", "DEBUG") -@@ -2581,8 +2586,8 @@ class FabricDevices(DnacBase): - if "LAYER_3" in border_types: - if not (layer3_settings or have_layer3_settings): - self.msg = ( -- "The parameter 'border_settings' is mandatory when the 'device_roles' has 'BORDER_NODE' " -- "for the device {ip}.".format(ip=device_ip) -+ "The parameter 'layer3_settings' is mandatory under 'borders_settings' when the " -+ "'device_roles' has 'BORDER_NODE' for the device {ip}.".format(ip=device_ip) - ) - self.status = "failed" - return self.check_return_status() -@@ -2599,27 +2604,55 @@ class FabricDevices(DnacBase): - ) - self.status = "failed" - return self.check_return_status() -+ else: -+ if have_layer3_settings and (str(local_autonomous_system_number) != str(have_layer3_settings.get("localAutonomousSystemNumber"))): -+ self.msg = ( -+ "The parameter 'local_autonomous_system_number' under 'layer3_settings' should not be " -+ "updated for the device with IP '{ip}'.".format(ip=device_ip) -+ ) -+ self.status = "failed" -+ return self.check_return_status() - - self.validate_local_autonomous_system_number(local_autonomous_system_number, device_ip) - self.log( - "Successfully validated 'local_autonomous_system_number': {asn_number}" - .format(asn_number=local_autonomous_system_number), "DEBUG" - ) -- is_default_exit = layer3_settings.get("layer3_settings") -- if not is_default_exit: -+ is_default_exit = layer3_settings.get("is_default_exit") -+ if is_default_exit is None: - if have_layer3_settings: - have_is_default_exit = have_layer3_settings.get("isDefaultExit") - is_default_exit = have_is_default_exit - else: - is_default_exit = True -+ else: -+ if have_layer3_settings: -+ have_is_default_exit = have_layer3_settings.get("importExternalRoutes") -+ if is_default_exit != have_is_default_exit: -+ self.msg = ( -+ "The parameter 'is_default_exit' under 'layer3_settings' should not be " -+ "updated for the device with IP '{ip}'.".format(ip=device_ip) -+ ) -+ self.status = "failed" -+ return self.check_return_status() - - import_external_routes = 
layer3_settings.get("import_external_routes") -- if not import_external_routes: -+ if import_external_routes is None: - if have_layer3_settings: - have_import_external_routes = have_layer3_settings.get("importExternalRoutes") - import_external_routes = have_import_external_routes - else: - import_external_routes = True -+ else: -+ if have_layer3_settings: -+ have_import_external_routes = have_layer3_settings.get("importExternalRoutes") -+ if import_external_routes != have_import_external_routes: -+ self.msg = ( -+ "The parameter 'import_external_routes' under 'layer3_settings' should not be " -+ "updated for the device with IP '{ip}'.".format(ip=device_ip) -+ ) -+ self.status = "failed" -+ return self.check_return_status() - - border_priority = layer3_settings.get("border_priority") - # Default value of border priority is 10 -@@ -3107,7 +3140,7 @@ class FabricDevices(DnacBase): - ) - return sda_l3_handoff_info - -- def validate_layer3_handoff_ip_transit(self, item, device_ip, is_ip_l3_handoff_exists, have_ip_l3_handoff): -+ def validate_layer3_handoff_ip_transit(self, item, device_ip, is_ip_l3_handoff_exists, have_ip_l3_handoff, l3_ip_handoff_index): - """ - Validate Layer 3 handoff IP transit parameters. - -@@ -3116,6 +3149,7 @@ class FabricDevices(DnacBase): - device_ip (str): The device IP address. - is_ip_l3_handoff_exists (int): The existence of the L3 handoff item. - have_ip_l3_handoff (dict): Existing L3 handoff details for the device. -+ l3_ip_handoff_index (int): Index for the current item in the 'have_ip_l3_handoff'. - - Returns: - tuple: A tuple containing transit_id, interface_name, virtual_network_name, vlan_id, tcp_mss_adjustment -@@ -3217,9 +3251,9 @@ class FabricDevices(DnacBase): - self.log(self.msg, "ERROR") - return (None, None, None, None, None, False) - elif virtual_network_name and (not vlan_id): -- vlan_id = have_ip_l3_handoff.get("vlanId") -+ vlan_id = have_ip_l3_handoff[l3_ip_handoff_index].get("vlanId") - elif vlan_id and (not virtual_network_name): -- virtual_network_name = have_ip_l3_handoff.get("virtualNetworkName") -+ virtual_network_name = have_ip_l3_handoff[l3_ip_handoff_index].get("virtualNetworkName") - else: - if not (virtual_network_name and vlan_id): - self.msg = ( -@@ -3234,7 +3268,7 @@ class FabricDevices(DnacBase): - tcp_mss_adjustment = item.get("tcp_mss_adjustment") - if not tcp_mss_adjustment: - if is_ip_l3_handoff_exists: -- have_tcp_mss_adjustment = have_ip_l3_handoff.get("tcpMssAdjustment") -+ have_tcp_mss_adjustment = have_ip_l3_handoff[l3_ip_handoff_index].get("tcpMssAdjustment") - if have_tcp_mss_adjustment: - tcp_mss_adjustment = have_tcp_mss_adjustment - else: -@@ -3322,7 +3356,8 @@ class FabricDevices(DnacBase): - - (transit_id, interface_name, virtual_network_name, vlan_id, tcp_mss_adjustment, is_valid) = \ - self.validate_layer3_handoff_ip_transit( -- item, device_details.get("device_ip"), is_ip_l3_handoff_exists, have_ip_l3_handoff -+ item, device_details.get("device_ip"), is_ip_l3_handoff_exists, -+ have_ip_l3_handoff, l3_ip_handoff_index - ) - - if not is_valid: From 7271f5e715f372e145752d0700266127797ab001 Mon Sep 17 00:00:00 2001 From: Syed-khadeerahmed Date: Fri, 8 Nov 2024 10:21:36 +0530 Subject: [PATCH 30/83] coding is in progress --- playbooks/swim_workflow_manager.yml | 30 ++++-- plugins/modules/swim_workflow_manager.py | 122 +++++------------------ 2 files changed, 45 insertions(+), 107 deletions(-) diff --git a/playbooks/swim_workflow_manager.yml b/playbooks/swim_workflow_manager.yml index dcadb3793..2f2073f60 100644 --- 
a/playbooks/swim_workflow_manager.yml +++ b/playbooks/swim_workflow_manager.yml @@ -22,16 +22,24 @@ dnac_api_task_timeout: 1000 dnac_task_poll_interval: 1 config: - - image_distribution_details: - site_name: Global/Chennai/LTTS/FLOOR1 + - tagging_details: + image_name: cat9k_iosxe.17.15.01.SPA.bin + device_role: core, distribution device_image_family_name: Cisco Catalyst 9300 Switch - device_role: CORE - device_family_name: Switches and Hubs - device_series_name: Cisco Catalyst 9300 Series Switches + # site_name: Global/Chennai/LTTS/FLOOR1 + tagging: False - # - tagging_details: - # image_name: cat9k_iosxe.17.15.01.SPA.bin - # device_role: ["core"] - # device_image_family_name: Cisco Catalyst 9300 Switch - # # site_name: Global/Chennai/LTTS/FLOOR1 - # tagging: True + + # - image_distribution_details: + # site_name: Global/Chennai/LTTS/FLOOR1 + # device_image_family_name: Cisco Catalyst 9300 Switch + # device_role: CORE + # device_family_name: Switches and Hubs + # device_series_name: Cisco Catalyst 9300 Series Switches + + # - image_distribution_details: + # # image_name: cat9k_iosxe.17.12.01.SPA.bin + # site_name: Global/USA/San Francisco/BGL_18 + # device_role: ALL + # device_family_name: Switches and Hubs + # device_series_name: Cisco Catalyst 9300 Series Switches \ No newline at end of file diff --git a/plugins/modules/swim_workflow_manager.py b/plugins/modules/swim_workflow_manager.py index b618c6e76..ce04dda10 100644 --- a/plugins/modules/swim_workflow_manager.py +++ b/plugins/modules/swim_workflow_manager.py @@ -1165,10 +1165,6 @@ def get_have(self): have = {} distribution_details = self.want.get("distribution_details") site_name = distribution_details.get("site_name") - - family_name = distribution_details.get("device_image_family_name") - self.get_device_family_identifier(family_name) - if site_name: site_exists = False (site_exists, site_id) = self.site_exists(site_name) @@ -1176,8 +1172,7 @@ def get_have(self): if site_exists: have["site_id"] = site_id self.log("Site '{0}' exists and has the site ID: {1}".format(site_name, str(site_id)), "DEBUG") - golden_tagged_images_id = self.get_golden_image_id(site_id) - self.log(golden_tagged_images_id) + # check if image for distributon is available if distribution_details.get("image_name"): name = distribution_details.get("image_name").split("/")[-1] @@ -1187,9 +1182,9 @@ def get_have(self): elif self.have.get("imported_image_id"): have["distribution_image_id"] = self.have.get("imported_image_id") - # else: - # self.log("Image details required for distribution have not been provided", "ERROR") - # self.module.fail_json(msg="Image details required for distribution have not been provided", response=[]) + else: + self.log("Image details required for distribution have not been provided", "ERROR") + self.module.fail_json(msg="Image details required for distribution have not been provided", response=[]) device_params = { "hostname": distribution_details.get("device_hostname"), @@ -1529,55 +1524,6 @@ def get_diff_import(self): return self - def get_golden_image_id(self, site_id): - distribution_details = self.want.get("distribution_details") - distribution_device_role = distribution_details.get("device_role") - self.log(self.want.get("device_role")) - golden_tagged_image_ids = [] - final_golden_tagged_image = [] - self.log("Inside get_golden_image_id function", "DEBUG") - - image_response = self.dnac._exec( - family="software_image_management_swim", - function="get_software_image_details", - params={"isTaggedGolden": True}, - ) - - if not 
image_response or "response" not in image_response: - self.log("Empty or invalid response received from 'get_software_image_details'", "ERROR") - return golden_tagged_image_ids - - self.log("Received API response from 'get_software_image_details': {0}".format(image_response), "DEBUG") - - for item in image_response.get("response", []): - image_uuid = item.get("imageUuid") - if image_uuid: - golden_tagged_image_ids.append(image_uuid) - image_params = { - "image_id": image_uuid, - "site_id": site_id, - "device_family_identifier": self.have.get("device_family_identifier"), - "device_role": distribution_device_role - } - self.log(image_params) - - golden_tag_response = self.dnac._exec( - family="software_image_management_swim", - function="get_golden_tag_status_of_an_image", - op_modifies=True, - params=image_params - ) - self.log(golden_tag_response) - if golden_tag_response: - if golden_tag_response.get("response").get("taggedGolden") is True: - final_golden_tagged_image.append(image_uuid) - self.log(final_golden_tagged_image) - - self.log("Golden tagged image IDs: {0}".format(golden_tagged_image_ids), "DEBUG") - self.log("specific golden tagged image for the given parameter is {0}".format(final_golden_tagged_image)) - - return final_golden_tagged_image - def get_diff_tagging(self): """ Tag or untag a software image as golden based on provided tagging details. @@ -1597,16 +1543,16 @@ def get_diff_tagging(self): tag_image_golden = tagging_details.get("tagging") image_name = self.get_image_name_from_id(self.have.get("tagging_image_id")) device_role = tagging_details.get("device_role", "ALL") - - if isinstance(device_role, str): - device_role = [device_role] - + device_role_no = [] already_un_tagged_device_role = [] already_tagged_device_role = [] device_roles = ["core", "distribution", "access", "border router", "unknown", "all"] - for role in device_role: + for role in device_role.split(','): + role = role.strip() + device_role_no.append(role) + if role.lower() not in device_roles: self.status = "failed" self.msg = ( @@ -1617,14 +1563,13 @@ def get_diff_tagging(self): self.result['response'] = self.msg self.check_return_status() - for role in device_role: + for role in device_role.split(','): image_params = { "image_id": self.have.get("tagging_image_id"), "site_id": self.have.get("site_id"), "device_family_identifier": self.have.get("device_family_identifier"), "device_role": role.upper() } - self.log(image_params) response = self.dnac._exec( family="software_image_management_swim", @@ -1637,20 +1582,25 @@ def get_diff_tagging(self): api_response = response.get('response') if api_response: image_status = api_response.get('taggedGolden') + self.log(image_status) + self.log(tag_image_golden) if image_status == tag_image_golden: + self.log("inside if") msg = "SWIM Image '{0}' already tagged as Golden image in Cisco Catalyst Center".format(image_name) self.log(msg, "INFO") already_tagged_device_role.append(role) + if not image_status and image_status == tag_image_golden: msg = "SWIM Image '{0}' already un-tagged from Golden image in Cisco Catalyst Center".format(image_name) self.log(msg, "INFO") already_un_tagged_device_role.append(role) + # Check if all roles are tagged as Golden if tag_image_golden: - if len(already_tagged_device_role) == len(device_role): + if len(already_tagged_device_role) == len(device_role_no): self.status = "success" self.result['changed'] = False self.msg = "SWIM Image '{0}' already tagged as Golden image in Cisco Catalyst Center for the roles - {1}.".format(image_name, 
device_role) @@ -1659,7 +1609,10 @@ def get_diff_tagging(self): self.log(self.msg, "INFO") return self else: - if len(already_un_tagged_device_role) == len(device_role): + self.log("inside else for already tagged") + self.log(len(already_un_tagged_device_role)) + self.log(len(device_role_no)) + if len(already_un_tagged_device_role) == len(device_role_no): self.log("inside logic") self.status = "success" self.result['changed'] = False @@ -1670,26 +1623,7 @@ def get_diff_tagging(self): return self if tag_image_golden: - image_param = dict( - site_id=self.have.get("site_id"), - device_family_identifier=self.have.get("device_family_identifier"), - device_role="ALL", - image_id=self.have.get("tagging_image_id"), - ) - self.log("Parameters for un-tagging the image as golden: {0}".format(str(image_param)), "INFO") - - response = self.dnac._exec( - family="software_image_management_swim", - function='remove_golden_tag_for_image', - op_modifies=True, - params=image_param - ) - self.log("Received API response from 'remove_golden_tag_for_image': {0}".format(str(response)), "DEBUG") - if not response: - self.status = "failed" - self.msg = "Did not get the response of API so cannot check the Golden tagging status of image - {0}".format(image_name) - - for role in device_role: + for role in device_role.split(','): image_params = dict( imageId=self.have.get("tagging_image_id"), siteId=self.have.get("site_id"), @@ -1707,8 +1641,7 @@ def get_diff_tagging(self): self.log("Received API response from 'tag_as_golden_image': {0}".format(str(response)), "DEBUG") else: - for role in device_role: - self.log(role) + for role in device_role.split(','): image_params = { "image_id": self.have.get("tagging_image_id"), "site_id": self.have.get("site_id"), @@ -2241,10 +2174,7 @@ def verify_diff_tagged(self): image_name = self.get_image_name_from_id(image_id) device_role = tagging_details.get("device_role", "ALL") - if isinstance(device_role, str): - device_role = [device_role] - - for role in device_role: + for role in device_role.split(','): image_params = dict( image_id=self.have.get("tagging_image_id"), site_id=self.have.get("site_id"), @@ -2439,9 +2369,9 @@ def main(): ccc_swims.get_want(config).check_return_status() ccc_swims.get_diff_import().check_return_status() ccc_swims.get_have().check_return_status() - # ccc_swims.get_diff_state_apply[state](config).check_return_status() - # if config_verify: - # ccc_swims.verify_diff_state_apply[state](config).check_return_status() + ccc_swims.get_diff_state_apply[state](config).check_return_status() + if config_verify: + ccc_swims.verify_diff_state_apply[state](config).check_return_status() module.exit_json(**ccc_swims.result) From cb9d755319cc1b47c06c87dca9e3be88a534e913 Mon Sep 17 00:00:00 2001 From: JosePabloOcampo1212 Date: Fri, 8 Nov 2024 08:40:36 -0600 Subject: [PATCH 31/83] Changes to resolve issues #200 'Sanity errors in discovery and event subscription modules.' 
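The event_subscription change initialises 'tmp_result' before the API response is
inspected, so later code never references an unassigned name. A minimal sketch of
that pattern (generic stand-ins for the real lookup, shown for illustration only):

    tmp_result = None                      # make sure the name exists on every path
    items = get_event_subscriptions()      # hypothetical stand-in for the API call
    if isinstance(items, dict) and 'response' in items:
        items = items['response']
    # 'tmp_result' can now be tested or returned safely even if no branch assigned it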
--- plugins/action/event_subscription.py | 1 + plugins/modules/discovery.py | 15 +++------------ 2 files changed, 4 insertions(+), 12 deletions(-) diff --git a/plugins/action/event_subscription.py b/plugins/action/event_subscription.py index bb444e268..fdbaa9cf6 100644 --- a/plugins/action/event_subscription.py +++ b/plugins/action/event_subscription.py @@ -103,6 +103,7 @@ def get_object_by_id(self, id): family="event_management", function="get_event_subscriptions", ) + tmp_result = None if isinstance(items, dict): if 'response' in items: items = items.get('response') diff --git a/plugins/modules/discovery.py b/plugins/modules/discovery.py index 389e13798..579b4cf1e 100644 --- a/plugins/modules/discovery.py +++ b/plugins/modules/discovery.py @@ -175,12 +175,6 @@ snmpPrivProtocol: description: SNMP privacy protocol. 'AES128'. type: str - snmpROCommunity: - description: SNMP RO community of the devices to be discovered. - type: str - snmpROCommunityDesc: - description: Description for SNMP RO community. - type: str snmpRWCommunity: description: SNMP RW community of the devices to be discovered. type: str @@ -208,9 +202,6 @@ timeOut: description: Time to wait for device response. type: int - timeout: - description: Time to wait for device response in seconds. - type: int updateMgmtIp: description: Updates Management IP if multiple IPs are available for a device. If set to true, when a device is rediscovered with a different IP, the management @@ -371,13 +362,13 @@ snmpMode: string snmpPrivPassphrase: string snmpPrivProtocol: string - snmpROCommunity: string - snmpROCommunityDesc: string + snmpRoCommunity: string + snmpRoCommunityDesc: string snmpRWCommunity: string snmpRWCommunityDesc: string snmpUserName: string snmpVersion: string - timeout: 0 + timeOut: 0 userNameList: - string From 694c78f897e7ee54895a3b687b9a4b5ad9260c1c Mon Sep 17 00:00:00 2001 From: JosePabloOcampo1212 Date: Fri, 8 Nov 2024 09:58:50 -0600 Subject: [PATCH 32/83] Modifications of the changelog --- changelogs/changelog.yaml | 7 +++++++ galaxy.yml | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/changelogs/changelog.yaml b/changelogs/changelog.yaml index 8d3f5fb60..41a12800e 100644 --- a/changelogs/changelog.yaml +++ b/changelogs/changelog.yaml @@ -1064,3 +1064,10 @@ releases: - Changes in dnac.py - inventory_workflow_manager.py - added attribute hostnames, serial_numbers and mac_addresses - inventory_workflow_manager.py - Removed attribute hostname_list, serial_number_list and mac_address_list + 6.23.0: + release_date: "2024-11-08" + changes: + release_summary: modifications to fix issues '#200'. 
+ minor_changes: + - Removing duplicates in the discovery.py module + - Variable initialization in the event_subscription.py action \ No newline at end of file diff --git a/galaxy.yml b/galaxy.yml index eee72fc97..f719fc0d8 100644 --- a/galaxy.yml +++ b/galaxy.yml @@ -1,7 +1,7 @@ --- namespace: cisco name: dnac -version: 6.22.0 +version: 6.23.0 readme: README.md authors: - Rafael Campos From 13ed946674edd5002b4938b91266918371a4dfa5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Francisco=20Mu=C3=B1oz=20Miranda?= <93611708+fmunozmiranda@users.noreply.github.com> Date: Fri, 8 Nov 2024 11:05:21 -0600 Subject: [PATCH 33/83] Update sanity_tests.yml --- .github/workflows/sanity_tests.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/sanity_tests.yml b/.github/workflows/sanity_tests.yml index e974d81c7..efff5cd39 100644 --- a/.github/workflows/sanity_tests.yml +++ b/.github/workflows/sanity_tests.yml @@ -25,6 +25,7 @@ jobs: - stable-2.15 - stable-2.16 - stable-2.17 + - devel runs-on: ubuntu-22.04 steps: - name: Check out code From 4cb80e844393aa43b6be5bd4b7ca12c3729fcdb7 Mon Sep 17 00:00:00 2001 From: Madhan Date: Sun, 10 Nov 2024 09:25:29 +0530 Subject: [PATCH 34/83] Spelling error in docstring --- plugins/module_utils/dnac.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plugins/module_utils/dnac.py b/plugins/module_utils/dnac.py index e89c71917..18d9f7078 100644 --- a/plugins/module_utils/dnac.py +++ b/plugins/module_utils/dnac.py @@ -535,7 +535,7 @@ def get_execution_details(self, exec_id): def check_execution_response_status(self, response, api_name): """ - Checks the reponse status provided by API in the Cisco Catalyst Center + Checks the response status provided by API in the Cisco Catalyst Center Args: response (dict) - API response api_name (str) - API name From 14897d3a6899afbce92cd9649949b2e2f4937e1e Mon Sep 17 00:00:00 2001 From: MUTHU-RAKESH-27 <19cs127@psgitech.ac.in> Date: Mon, 11 Nov 2024 11:48:43 +0530 Subject: [PATCH 35/83] Added a function to remove the duplicates of control plane node ips --- .../sda_fabric_transits_workflow_manager.py | 45 ++++++++++++++++++- 1 file changed, 44 insertions(+), 1 deletion(-) diff --git a/plugins/modules/sda_fabric_transits_workflow_manager.py b/plugins/modules/sda_fabric_transits_workflow_manager.py index 7dc52dc98..f71ff56a0 100644 --- a/plugins/modules/sda_fabric_transits_workflow_manager.py +++ b/plugins/modules/sda_fabric_transits_workflow_manager.py @@ -746,6 +746,49 @@ def handle_ip_transit_settings(self, item, fabric_transits_values, fabric_transi return fabric_transits_values + def remove_duplicates(self, control_plane_ips): + """ + Remove the duplicates from the given list. + + Parameters: + control_plane_ips (list): List of elements which may contain duplicates. + Returns: + final_control_plane_ips (list): List of elements with out duplicates. + Description: + Return empty list if the list is empty or it is a NoneType. Check whether any + duplicates is present in the list or not. If yes, remove them and return the list. 
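+            Example (illustrative only):
+                remove_duplicates(["10.0.0.2", "10.0.0.1", "10.0.0.2"])
+                returns ["10.0.0.1", "10.0.0.2"], since the list is sorted first and
+                consecutive duplicates are then skipped.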
+ """ + + self.log( + "The list of control plane ips before removing the duplicates {list_of_ips}" + .format(list_of_ips=control_plane_ips) + ) + final_control_plane_ips = [] + + # No need to proceed when there is no elements in the list + if not control_plane_ips: + self.log("Returning the empty control plane list of IPs.") + return final_control_plane_ips + + control_plane_ips = sorted(control_plane_ips) + length_control_plane_ips = len(control_plane_ips) + + # No need to check for the duplicates when there is only one element in the list + if length_control_plane_ips == 1: + return control_plane_ips + + final_control_plane_ips.append(control_plane_ips[0]) + for i in range(1, length_control_plane_ips): + if control_plane_ips[i] != control_plane_ips[i - 1]: + final_control_plane_ips.append(control_plane_ips[i]) + + self.log( + "The list of control plane IPs after removing the duplicates '{list_of_ips}'" + .format(list_of_ips=final_control_plane_ips) + ) + + return final_control_plane_ips + def handle_sda_transit_settings(self, item, fabric_transits_values, transit_type, fabric_transit_index): """ Handle the SDA transit settings details. @@ -779,7 +822,7 @@ def handle_sda_transit_settings(self, item, fabric_transits_values, transit_type else: sda_transit_settings.update({"isMulticastOverTransitEnabled": False}) - control_plane_network_device_ips = want_sda_transit_settings.get("control_plane_network_device_ips") + control_plane_network_device_ips = self.remove_duplicates(want_sda_transit_settings.get("control_plane_network_device_ips")) if have_sda_transit_settings and not control_plane_network_device_ips: sda_transit_settings.update({"controlPlaneNetworkDeviceIds": sorted(have_sda_transit_settings.get("controlPlaneNetworkDeviceIds"))}) elif control_plane_network_device_ips: From 422ff09f91f53e2d242caa2237c5fdda7e00eb5e Mon Sep 17 00:00:00 2001 From: Syed-khadeerahmed Date: Mon, 11 Nov 2024 15:15:25 +0530 Subject: [PATCH 36/83] bug fixed --- plugins/modules/provision_workflow_manager.py | 42 +------------------ 1 file changed, 2 insertions(+), 40 deletions(-) diff --git a/plugins/modules/provision_workflow_manager.py b/plugins/modules/provision_workflow_manager.py index c5bedf2ee..f7a96db6b 100644 --- a/plugins/modules/provision_workflow_manager.py +++ b/plugins/modules/provision_workflow_manager.py @@ -820,45 +820,6 @@ def get_wireless_params(self): self.log("Managed AP Location must be a floor", "CRITICAL") self.module.fail_json(msg="Managed AP Location must be a floor", response=[]) - else: - self.log("Checking for mandatory interface fields in Catalyst Center version >= 2.3.7.6", "DEBUG") - interfaces = self.validated_config.get("dynamic_interfaces", []) - self.log("Configured interfaces: {0}".format(interfaces), "DEBUG") - has_interface_name = False - has_vlan_id = False - - if interfaces is None: - self.msg = ("It appears that the 'dynamic_interfaces' parameter is either missing or set to None. 
" - "Please note that this parameter is required for provisioning a wireless device in " - "Catalyst Center version 2.3.7.6 or higher.") - self.log(self.msg, "ERROR") - self.result['response'] = self.msg - self.status = "failed" - self.check_return_status() - - for interface in interfaces: - if 'interface_name' in interface: - has_interface_name = True - if 'vlan_id' in interface: - has_vlan_id = True - self.log("Presence of 'interface_name' in interfaces: {0}".format(has_interface_name), "DEBUG") - self.log("Presence of 'vlan_id' in interfaces: {0}".format(has_vlan_id), "DEBUG") - - missing_fields = [] - if not has_interface_name: - missing_fields.append("interface_name") - if not has_vlan_id: - missing_fields.append("vlan_id") - - if missing_fields: - missing_fields_str = ', '.join(missing_fields) - self.msg = ("The following required fields for provisioning a wireless device in version" - " 2.3.7.6 are currently missing: {0}".format(missing_fields_str)) - self.log(self.msg, "ERROR") - self.result['response'] = self.msg - self.status = "failed" - self.check_return_status() - wireless_params[0]["dynamicInterfaces"] = [] if self.validated_config.get("dynamic_interfaces"): for interface in self.validated_config.get("dynamic_interfaces"): @@ -873,7 +834,8 @@ def get_wireless_params(self): wireless_params[0]["dynamicInterfaces"].append(interface_dict) wireless_params[0]["skip_ap_provision"] = self.validated_config.get("skip_ap_provision") - wireless_params[0]["primaryManagedAPLocationsSiteIds"] = self.validated_config.get("primary_managed_ap_Locations") + primary_ap_location = self.validated_config.get("primary_managed_ap_Locations") or self.validated_config.get("managed_ap_locations") + wireless_params[0]["primaryManagedAPLocationsSiteIds"] = primary_ap_location wireless_params[0]["secondaryManagedAPLocationsSiteIds"] = self.validated_config.get("secondary_managed_ap_locations") if self.validated_config.get("rolling_ap_upgrade"): From 8b706b2c67006fec90c8087736de30809f786e8b Mon Sep 17 00:00:00 2001 From: MUTHU-RAKESH-27 <19cs127@psgitech.ac.in> Date: Mon, 11 Nov 2024 15:44:46 +0530 Subject: [PATCH 37/83] Addressed the review comments --- .../sda_fabric_transits_workflow_manager.py | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/plugins/modules/sda_fabric_transits_workflow_manager.py b/plugins/modules/sda_fabric_transits_workflow_manager.py index f71ff56a0..02942c4ee 100644 --- a/plugins/modules/sda_fabric_transits_workflow_manager.py +++ b/plugins/modules/sda_fabric_transits_workflow_manager.py @@ -746,7 +746,7 @@ def handle_ip_transit_settings(self, item, fabric_transits_values, fabric_transi return fabric_transits_values - def remove_duplicates(self, control_plane_ips): + def remove_duplicate_ips(self, control_plane_ips): """ Remove the duplicates from the given list. @@ -761,20 +761,25 @@ def remove_duplicates(self, control_plane_ips): self.log( "The list of control plane ips before removing the duplicates {list_of_ips}" - .format(list_of_ips=control_plane_ips) + .format(list_of_ips=control_plane_ips), "DEBUG" ) final_control_plane_ips = [] # No need to proceed when there is no elements in the list if not control_plane_ips: - self.log("Returning the empty control plane list of IPs.") + self.log("Received an empty or None list. 
Returning an empty list.", "DEBUG") return final_control_plane_ips control_plane_ips = sorted(control_plane_ips) + self.log( + "Control plane IPs sorted: {0}".format(control_plane_ips), + "DEBUG" + ) length_control_plane_ips = len(control_plane_ips) # No need to check for the duplicates when there is only one element in the list if length_control_plane_ips == 1: + self.log("Only one IP found, no duplicates to remove.", "DEBUG") return control_plane_ips final_control_plane_ips.append(control_plane_ips[0]) @@ -784,7 +789,7 @@ def remove_duplicates(self, control_plane_ips): self.log( "The list of control plane IPs after removing the duplicates '{list_of_ips}'" - .format(list_of_ips=final_control_plane_ips) + .format(list_of_ips=final_control_plane_ips), "DEBUG" ) return final_control_plane_ips @@ -822,7 +827,7 @@ def handle_sda_transit_settings(self, item, fabric_transits_values, transit_type else: sda_transit_settings.update({"isMulticastOverTransitEnabled": False}) - control_plane_network_device_ips = self.remove_duplicates(want_sda_transit_settings.get("control_plane_network_device_ips")) + control_plane_network_device_ips = self.remove_duplicate_ips(want_sda_transit_settings.get("control_plane_network_device_ips")) if have_sda_transit_settings and not control_plane_network_device_ips: sda_transit_settings.update({"controlPlaneNetworkDeviceIds": sorted(have_sda_transit_settings.get("controlPlaneNetworkDeviceIds"))}) elif control_plane_network_device_ips: From 0913d5b1248b141805b40367688b5d6ab312fb43 Mon Sep 17 00:00:00 2001 From: Abhishek-121 Date: Mon, 11 Nov 2024 17:13:16 +0530 Subject: [PATCH 38/83] Add the new feature of enabling wired data collection before creating/updating any fabric sites/zones, also fix the issue of incorrect Fabric Site Creation for Global site and handling duplicate Input Values go wrong --- ...sda_fabric_sites_zones_workflow_manager.py | 190 +++++++++++++++++- 1 file changed, 182 insertions(+), 8 deletions(-) diff --git a/plugins/modules/sda_fabric_sites_zones_workflow_manager.py b/plugins/modules/sda_fabric_sites_zones_workflow_manager.py index ab4eca41c..6a85a82bc 100644 --- a/plugins/modules/sda_fabric_sites_zones_workflow_manager.py +++ b/plugins/modules/sda_fabric_sites_zones_workflow_manager.py @@ -529,6 +529,13 @@ def get_want(self, config): ) self.set_operation_result("failed", False, self.msg, "ERROR").check_return_status() + if site_name.title() == "Global": + self.msg = ( + "Unable to create/update the given site 'Global' to {0} as it is not allowed operation " + "in the Cisco Catalyst Center." + ).format(fabric_type) + self.set_operation_result("failed", False, self.msg, "ERROR").check_return_status() + if fabric_type not in ["fabric_site", "fabric_zone"]: self.msg = ( "Invalid fabric_type '{0}' provided. Please use 'fabric_site' or 'fabric_zone' for fabric site/zone operations" @@ -1163,6 +1170,150 @@ def update_site_zones_profile_messages(self): return self + def is_wired_data_collection_enable(self, site_name, site_id): + """ + Checks if wired data collection is enabled for a specified site. + + Args: + self (object): An instance of a class used for interacting with Cisco Catalyst Center. + site_name (str): The name of the site to check. + site_id (str): The unique identifier of the site. + Returns: + bool: True if wired data collection is enabled for the site, False otherwise. + Description: + This function logs the status of wired data collection for a given site and checks if it is enabled. 
+ It retrieves telemetry settings for the specified site using the `retrieve_telemetry_settings_for_a_site` + API function. If telemetry settings or wired data collection details are missing or disabled, + function logs relevant messages and returns False. If wired data collection is enabled, it returns True. + """ + + self.log("Checking whether wired data collection is enabled or not for the site: {0}".format(site_name), "INFO") + + try: + telemetry_response = self.dnac._exec( + family="network_settings", + function='retrieve_telemetry_settings_for_a_site', + op_modifies=False, + params={"id": site_id} + ) + telemetry_details = telemetry_response.get("response", {}) + if not telemetry_details: + self.log("No telemetry settings found for site '{0}' (ID: {1})".format(site_name, site_id), "WARNING") + return False + + self.log("Successfully retrieved telemetry settings for site '{0}' (ID: {1}): {2}".format(site_name, site_id, telemetry_details), "DEBUG") + wired_data_collection = telemetry_details.get("wiredDataCollection") + + if not wired_data_collection: + self.log("Wired Data Collection is not enabled at this site '{0}'.".format(site_name), "DEBUG") + return False + + is_enabled = wired_data_collection.get("enableWiredDataCollection") + if not is_enabled: + self.log("Wired Data Collection is not enabled at this site '{0}'.".format(site_name), "DEBUG") + return False + + except Exception as e: + self.msg = ( + "Exception occurred while getting telemetry settings for site '{0}' (ID: {1}): {2}".format(site_name, site_id, str(e)) + ) + self.set_operation_result("failed", False, self.msg, "CRITICAL").check_return_status() + + return True + + def get_telemetry_details(self, site_name, site_id): + """ + Retrieves telemetry settings for a specified site. + + Args: + self (object): An instance of a class used for interacting with Cisco Catalyst Center. + site_name (str): The name of the site for which telemetry settings are being retrieved. + site_id (str): The unique identifier of the site. + Returns: + dict: A dictionary containing telemetry details for the site. If telemetry settings are not found + or an exception occurs, it logs an error and returns an empty dictionary. + Description: + This function logs the process of checking and retrieving telemetry settings for a specified site. + It sends a request to the `retrieve_telemetry_settings_for_a_site` API function using the provided + site ID. If no telemetry settings are found, it logs an error message and sets the operation result + to "failed." 
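+            Example of the returned structure (shape only, limited to the keys this
+            module reads; the real response may contain additional settings):
+                {"wiredDataCollection": {"enableWiredDataCollection": False}}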
+ """ + + self.log("Checking whether wired data collection is enabled or not for the site: {0}".format(site_name), "INFO") + + try: + telemetry_response = self.dnac._exec( + family="network_settings", + function='retrieve_telemetry_settings_for_a_site', + op_modifies=False, + params={"id": site_id} + ) + telemetry_details = telemetry_response.get("response", {}) + if not telemetry_details: + self.mg = "No telemetry settings found for site '{0}' (ID: {1})".format(site_name, site_id) + self.set_operation_result("failed", False, self.msg, "CRITICAL").check_return_status() + + self.log("Successfully retrieved telemetry settings for site '{0}' (ID: {1}): {2}".format(site_name, site_id, telemetry_details), "DEBUG") + + except Exception as e: + self.msg = ( + "Exception occurred while getting telemetry settings for site '{0}' (ID: {1}): {2}".format(site_name, site_id, str(e)) + ) + self.set_operation_result("failed", False, self.msg, "CRITICAL").check_return_status() + + return telemetry_details + + def enable_wired_data_collection(self, site_name, site_id): + """ + Enables wired data collection for a specified site. + + Args: + self (object): An instance of a class used for interacting with Cisco Catalyst Center. + site_name (str): The name of the site for which wired data collection should be enabled. + site_id (str): The unique identifier of the site. + Returns: + self (object): An instance of a class used for interacting with Cisco Catalyst Center. + Description: + This function enables wired data collection for a specified site in Cisco Catalyst Center. It first retrieves + the current telemetry settings for the site using `get_telemetry_details`. If the `wiredDataCollection` field + is missing, it initializes it as an empty dictionary. It then sets `enableWiredDataCollection` to `True`. + The function creates a payload with the updated telemetry settings and initiates an API call to set + telemetry settings for the site. It retrieves the task ID for the API call and checks the status of task. + If any part of the process fails, it logs an error message and sets the operation result to "failed." + If successful, it logs an informational message indicating that wired data collection was enabled. 
+        """
+
+        self.log("Started the process of enabling wired data collection for site {0}...".format(site_name), "DEBUG")
+
+        try:
+            telemetry_settings = self.get_telemetry_details(site_name, site_id)
+            if telemetry_settings.get('wiredDataCollection') is None:
+                telemetry_settings['wiredDataCollection'] = {}
+            telemetry_settings["wiredDataCollection"]["enableWiredDataCollection"] = True
+
+            payload = {
+                "id": site_id,
+                "payload": telemetry_settings
+            }
+            task_name = "set_telemetry_settings_for_a_site"
+            task_id = self.get_taskid_post_api_call("network_settings", task_name, payload)
+
+            if not task_id:
+                self.msg = "Unable to retrieve the task_id for the task '{0}'.".format(task_name)
+                self.set_operation_result("failed", False, self.msg, "ERROR")
+                return self
+            success_msg = "Wired Data Collection has been enabled for the site '{0}'".format(site_name)
+            self.log(success_msg, "INFO")
+            self.get_task_status_from_tasks_by_id(task_id, task_name, success_msg)
+        except Exception as e:
+            self.msg = (
+                "An exception occurred while enabling the Wired Data Collection for the site '{0}' "
+                "in Cisco Catalyst Center: {1}"
+            ).format(site_name, str(e))
+            self.set_operation_result("failed", False, self.msg, "ERROR")
+
+        return self
+
     def get_diff_merged(self, config):
         """
         Creates, updates, or deletes fabric sites and zones based on the provided configuration, and manages
@@ -1187,7 +1338,11 @@ def get_diff_merged(self, config):
         """
 
         # Create/Update Fabric sites/zones in Cisco Catalyst Center
-        fabric_sites = self.want.get('fabric_sites')
+        raw_fabric_sites = self.want.get('fabric_sites')
+        # Convert each dictionary to a sorted tuple of key-value pairs
+        unique_fabric_sites = {tuple(sorted(d.items())) for d in raw_fabric_sites}
+        # Convert each unique tuple back into a dictionary
+        fabric_sites = [dict(t) for t in unique_fabric_sites]
 
         for site in fabric_sites:
             site_name = site.get("site_name_hierarchy")
@@ -1207,13 +1362,24 @@ def get_diff_merged(self, config):
                     ).format(auth_profile)
                     self.set_operation_result("failed", False, self.msg, "ERROR").check_return_status()
 
+            self.log("Checking whether Wired Endpoint Data Collection is enabled at this site '{0}' or not".format(site_name), "INFO")
+            is_wired_data_enable = self.is_wired_data_collection_enable(site_name, site_id)
+
+            if not is_wired_data_enable:
+                self.log("Wired Data Collection is not enabled at this site '{0}'.".format(site_name), "INFO")
+                self.enable_wired_data_collection(site_name, site_id).check_return_status()
+                self.log("Wired Data Collection is enabled at this site '{0}' successfully.".format(site_name), "INFO")
+            else:
+                self.log("Wired Data Collection is already enabled at this site '{0}'.".format(site_name), "INFO")
+
             if fabric_type == "fabric_site":
-                # Check whether site is already fabric or not.
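# A minimal, self-contained illustration of the duplicate-removal idiom used just above
# (a sketch with made-up site entries, assuming flat dictionaries with hashable values):
raw_fabric_sites = [
    {"site_name_hierarchy": "Global/USA/Area1", "fabric_type": "fabric_site"},
    {"fabric_type": "fabric_site", "site_name_hierarchy": "Global/USA/Area1"},  # duplicate, different key order
]
# Sorting the items makes each tuple order-independent, so the set collapses duplicates.
unique_fabric_sites = {tuple(sorted(d.items())) for d in raw_fabric_sites}
fabric_sites = [dict(t) for t in unique_fabric_sites]
assert len(fabric_sites) == 1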
+                self.log("Checking whether the given site {0} is already a fabric site or not.".format(site_name), "DEBUG")
+
                 if site_id not in self.have.get("fabric_sites_ids"):
-                    # Create the fabric site in Cisco Catalyst Center
+                    self.log("Starting the process of making site {0} a fabric site...".format(site_name), "DEBUG")
                     self.create_fabric_site(site).check_return_status()
                 else:
-                    # Check whether fabric site needs any update or not
+                    self.log("Checking whether the given fabric site {0} needs update or not.".format(site_name), "DEBUG")
                     site_in_ccc = self.get_fabric_site_detail(site_name, site_id)
                     require_update = self.fabric_site_needs_update(site, site_in_ccc)
                     if require_update:
@@ -1222,14 +1388,20 @@ def get_diff_merged(self, config):
                         self.no_update_site.append(site_name)
                         self.log("Fabric site '{0}' already present and doesnot need any update in the Cisco Catalyst Center.".format(site_name), "INFO")
             else:
-                # Check whether site zone is already fabric or not.
+                self.log("Checking whether the given site {0} is already a fabric zone or not.".format(site_name), "DEBUG")
+
                 if site_id not in self.have.get("fabric_zone_ids"):
-                    # Create the fabric zone in Cisco Catalyst Center
+                    self.log("Starting the process of making site {0} a fabric zone...".format(site_name), "DEBUG")
                     self.create_fabric_zone(site).check_return_status()
                 else:
-                    # Check whether fabric site needs any update or not
+                    self.log("Checking whether the given fabric zone {0} needs update or not.".format(site_name), "DEBUG")
                     zone_in_ccc = self.get_fabric_zone_detail(site_name, site_id)
+                    if auth_profile and auth_profile != zone_in_ccc.get("authenticationProfileName"):
+                        self.log(
+                            "Authentication profile '{0}' does not match the profile in the Cisco Catalyst Center "
+                            "for the fabric zone {1}".format(auth_profile, site_name), "INFO"
+                        )
                         self.update_fabric_zone(site, zone_in_ccc).check_return_status()
                     else:
                         self.no_update_zone.append(site_name)
@@ -1389,7 +1561,9 @@ def verify_diff_merged(self, config):
         self.log("Desired State (want): {0}".format(str(self.want)), "INFO")
 
         if config.get('fabric_sites'):
-            fabric_sites = self.want.get('fabric_sites')
+            raw_fabric_sites = self.want.get('fabric_sites')
+            unique_fabric_sites = {tuple(sorted(d.items())) for d in raw_fabric_sites}
+            fabric_sites = [dict(t) for t in unique_fabric_sites]
             verify_site_list, verify_auth_list = [], []
             site_name_list, auth_name_list = [], []
             auth_flag = False
From bcb3743da03db6b0b65738060b6041ef48fcbd4d Mon Sep 17 00:00:00 2001
From: Rugvedi Kapse
Date: Mon, 11 Nov 2024 13:18:37 -0800
Subject: [PATCH 39/83] sanity tests only on modified files

---
 .github/workflows/sanity_tests_devel.yml | 8 ++++++--
 1 file changed, 6 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/sanity_tests_devel.yml b/.github/workflows/sanity_tests_devel.yml
index 4b51ba059..ff2ab5877 100644
--- a/.github/workflows/sanity_tests_devel.yml
+++ b/.github/workflows/sanity_tests_devel.yml
@@ -48,7 +48,11 @@ jobs:
 
       - name: Run sanity tests
         if: steps.changed_files.outputs.changed_files != ''
-        run: ansible-test sanity --docker -v --color --path ${{ steps.changed_files.outputs.changed_files }}
+        run: |
+          changed_files="${{ steps.changed_files.outputs.changed_files }}"
+          for file in $changed_files; do
+            ansible-test sanity --docker -v --color $file
+          done
         working-directory: ./ansible_collections/${{env.NAMESPACE}}/${{env.COLLECTION_NAME}}

      - name: Install yamllint
@@ -57,4 +61,4 @@ jobs:
      - name: Run yamllint
        if: steps.changed_files.outputs.changed_files != ''
        run: yamllint -c .yamllint.yml ${{
steps.changed_files.outputs.changed_files }} - working-directory: ./ansible_collections/${{env.NAMESPACE}}/${{env.COLLECTION_NAME}} + working-directory: ./ansible_collections/${{env.NAMESPACE}}/${{env.COLLECTION_NAME}} \ No newline at end of file From 67ebb20bc6a4421bfd2b759a42c82bc23316b056 Mon Sep 17 00:00:00 2001 From: Rugvedi Kapse Date: Mon, 11 Nov 2024 13:21:53 -0800 Subject: [PATCH 40/83] sanity tests only on modified files --- .github/workflows/sanity_tests_devel.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/sanity_tests_devel.yml b/.github/workflows/sanity_tests_devel.yml index ff2ab5877..3b0a8d0bc 100644 --- a/.github/workflows/sanity_tests_devel.yml +++ b/.github/workflows/sanity_tests_devel.yml @@ -51,7 +51,7 @@ jobs: run: | changed_files="${{ steps.changed_files.outputs.changed_files }}" for file in $changed_files; do - ansible-test sanity --docker -v --color $file + ansible-test sanity --docker -v --color yes $file done working-directory: ./ansible_collections/${{env.NAMESPACE}}/${{env.COLLECTION_NAME}} From 763efa0ea65a68d0a34498b4c1dc5bcb7587fe14 Mon Sep 17 00:00:00 2001 From: Rugvedi Kapse Date: Mon, 11 Nov 2024 13:29:24 -0800 Subject: [PATCH 41/83] sanity tests only on modified files --- .github/workflows/sanity_tests_devel.yml | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/sanity_tests_devel.yml b/.github/workflows/sanity_tests_devel.yml index 3b0a8d0bc..4373e2114 100644 --- a/.github/workflows/sanity_tests_devel.yml +++ b/.github/workflows/sanity_tests_devel.yml @@ -1,3 +1,4 @@ +--- name: CI Devel on: @@ -43,13 +44,12 @@ jobs: cd ./ansible_collections/${{env.NAMESPACE}}/${{env.COLLECTION_NAME}} git fetch origin main changed_files=$(git diff --name-only origin/main HEAD) - echo "::set-output name=changed_files::$changed_files" + echo "changed_files=$changed_files" >> $GITHUB_ENV echo "Changed files: $changed_files" - name: Run sanity tests - if: steps.changed_files.outputs.changed_files != '' + if: env.changed_files != '' run: | - changed_files="${{ steps.changed_files.outputs.changed_files }}" for file in $changed_files; do ansible-test sanity --docker -v --color yes $file done @@ -59,6 +59,6 @@ jobs: run: pip install --user yamllint - name: Run yamllint - if: steps.changed_files.outputs.changed_files != '' - run: yamllint -c .yamllint.yml ${{ steps.changed_files.outputs.changed_files }} - working-directory: ./ansible_collections/${{env.NAMESPACE}}/${{env.COLLECTION_NAME}} \ No newline at end of file + if: env.changed_files != '' + run: yamllint -c .yamllint.yml ${{ env.changed_files }} + working-directory: ./ansible_collections/${{env.NAMESPACE}}/${{env.COLLECTION_NAME}} From fe4412b40cf9acd9659de17030ae9b5f22a95d33 Mon Sep 17 00:00:00 2001 From: Rugvedi Kapse Date: Mon, 11 Nov 2024 13:33:35 -0800 Subject: [PATCH 42/83] sanity tests only on modified files --- .github/workflows/sanity_tests_devel.yml | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/.github/workflows/sanity_tests_devel.yml b/.github/workflows/sanity_tests_devel.yml index 4373e2114..f2d9d3e62 100644 --- a/.github/workflows/sanity_tests_devel.yml +++ b/.github/workflows/sanity_tests_devel.yml @@ -43,13 +43,14 @@ jobs: run: | cd ./ansible_collections/${{env.NAMESPACE}}/${{env.COLLECTION_NAME}} git fetch origin main - changed_files=$(git diff --name-only origin/main HEAD) - echo "changed_files=$changed_files" >> $GITHUB_ENV - echo "Changed files: $changed_files" + git diff 
--name-only origin/main HEAD > changed_files.txt + echo "Changed files:" + cat changed_files.txt - name: Run sanity tests - if: env.changed_files != '' + if: steps.changed_files.outputs.changed_files != '' run: | + changed_files=$(cat changed_files.txt) for file in $changed_files; do ansible-test sanity --docker -v --color yes $file done @@ -59,6 +60,9 @@ jobs: run: pip install --user yamllint - name: Run yamllint - if: env.changed_files != '' - run: yamllint -c .yamllint.yml ${{ env.changed_files }} + if: steps.changed_files.outputs.changed_files != '' + run: | + changed_files=$(cat changed_files.txt) + yamllint -c .yamllint.yml $changed_files working-directory: ./ansible_collections/${{env.NAMESPACE}}/${{env.COLLECTION_NAME}} + From 80559c8da069231f9d852f4b0bcb602d9f25fee8 Mon Sep 17 00:00:00 2001 From: Rugvedi Kapse Date: Mon, 11 Nov 2024 13:37:33 -0800 Subject: [PATCH 43/83] sanity tests only on modified files --- .github/workflows/sanity_tests_devel.yml | 26 ++++++++++++++++-------- 1 file changed, 18 insertions(+), 8 deletions(-) diff --git a/.github/workflows/sanity_tests_devel.yml b/.github/workflows/sanity_tests_devel.yml index f2d9d3e62..814207eb9 100644 --- a/.github/workflows/sanity_tests_devel.yml +++ b/.github/workflows/sanity_tests_devel.yml @@ -48,21 +48,31 @@ jobs: cat changed_files.txt - name: Run sanity tests - if: steps.changed_files.outputs.changed_files != '' + if: ${{ steps.changed_files.outputs.changed_files != '' }} run: | - changed_files=$(cat changed_files.txt) - for file in $changed_files; do - ansible-test sanity --docker -v --color yes $file - done + if [ -s changed_files.txt ]; then + changed_files=$(cat changed_files.txt) + for file in $changed_files; do + ansible-test sanity --docker -v --color yes $file + done + else + echo "No changed files to test." + fi working-directory: ./ansible_collections/${{env.NAMESPACE}}/${{env.COLLECTION_NAME}} - name: Install yamllint run: pip install --user yamllint - name: Run yamllint - if: steps.changed_files.outputs.changed_files != '' + if: ${{ steps.changed_files.outputs.changed_files != '' }} run: | - changed_files=$(cat changed_files.txt) - yamllint -c .yamllint.yml $changed_files + if [ -s changed_files.txt ]; then + changed_files=$(cat changed_files.txt) + for file in $changed_files; do + yamllint -c .yamllint.yml $file + done + else + echo "No changed files to lint." 
+ fi working-directory: ./ansible_collections/${{env.NAMESPACE}}/${{env.COLLECTION_NAME}} From 152201a73677b326e9d334bb5621fa8bb597a117 Mon Sep 17 00:00:00 2001 From: Rugvedi Kapse Date: Mon, 11 Nov 2024 13:40:18 -0800 Subject: [PATCH 44/83] sanity tests only on modified files --- .github/workflows/sanity_tests_devel.yml | 2 -- 1 file changed, 2 deletions(-) diff --git a/.github/workflows/sanity_tests_devel.yml b/.github/workflows/sanity_tests_devel.yml index 814207eb9..6e0c77d53 100644 --- a/.github/workflows/sanity_tests_devel.yml +++ b/.github/workflows/sanity_tests_devel.yml @@ -48,7 +48,6 @@ jobs: cat changed_files.txt - name: Run sanity tests - if: ${{ steps.changed_files.outputs.changed_files != '' }} run: | if [ -s changed_files.txt ]; then changed_files=$(cat changed_files.txt) @@ -64,7 +63,6 @@ jobs: run: pip install --user yamllint - name: Run yamllint - if: ${{ steps.changed_files.outputs.changed_files != '' }} run: | if [ -s changed_files.txt ]; then changed_files=$(cat changed_files.txt) From ce166d61651bd39062ef073fe0dee03012ff9c1f Mon Sep 17 00:00:00 2001 From: Rugvedi Kapse Date: Mon, 11 Nov 2024 13:42:55 -0800 Subject: [PATCH 45/83] sanity tests only on modified files --- .github/workflows/sanity_tests_devel.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/sanity_tests_devel.yml b/.github/workflows/sanity_tests_devel.yml index 6e0c77d53..4ef7a822c 100644 --- a/.github/workflows/sanity_tests_devel.yml +++ b/.github/workflows/sanity_tests_devel.yml @@ -73,4 +73,3 @@ jobs: echo "No changed files to lint." fi working-directory: ./ansible_collections/${{env.NAMESPACE}}/${{env.COLLECTION_NAME}} - From 7003fc7c6a8ff90490f6a6f4ddb4046cc79c73c3 Mon Sep 17 00:00:00 2001 From: Rugvedi Kapse Date: Mon, 11 Nov 2024 13:50:11 -0800 Subject: [PATCH 46/83] sanity tests only on modified files --- .github/workflows/sanity_tests.yml | 3 ++- .github/workflows/sanity_tests_devel.yml | 4 ++-- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/.github/workflows/sanity_tests.yml b/.github/workflows/sanity_tests.yml index d5f61e0e0..3bb7379ad 100644 --- a/.github/workflows/sanity_tests.yml +++ b/.github/workflows/sanity_tests.yml @@ -1,3 +1,4 @@ +--- name: CI on: @@ -5,7 +6,7 @@ on: branches: [main] schedule: - cron: '0 6 * * *' - workflow_dispatch: + workflow_dispatch: true env: NAMESPACE: cisco diff --git a/.github/workflows/sanity_tests_devel.yml b/.github/workflows/sanity_tests_devel.yml index 4ef7a822c..7e561a743 100644 --- a/.github/workflows/sanity_tests_devel.yml +++ b/.github/workflows/sanity_tests_devel.yml @@ -2,8 +2,8 @@ name: CI Devel on: - workflow_dispatch: - pull_request: + workflow_dispatch: true + pull_request: true env: NAMESPACE: cisco From 086cae2bb318545f6f6ec4089b3341238b93024d Mon Sep 17 00:00:00 2001 From: Rugvedi Kapse Date: Mon, 11 Nov 2024 13:51:44 -0800 Subject: [PATCH 47/83] sanity tests only on modified files --- .github/workflows/sanity_tests.yml | 2 +- .github/workflows/sanity_tests_devel.yml | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/sanity_tests.yml b/.github/workflows/sanity_tests.yml index 3bb7379ad..f631cd94e 100644 --- a/.github/workflows/sanity_tests.yml +++ b/.github/workflows/sanity_tests.yml @@ -6,7 +6,7 @@ on: branches: [main] schedule: - cron: '0 6 * * *' - workflow_dispatch: true + workflow_dispatch: env: NAMESPACE: cisco diff --git a/.github/workflows/sanity_tests_devel.yml b/.github/workflows/sanity_tests_devel.yml index 7e561a743..4ef7a822c 100644 --- 
a/.github/workflows/sanity_tests_devel.yml +++ b/.github/workflows/sanity_tests_devel.yml @@ -2,8 +2,8 @@ name: CI Devel on: - workflow_dispatch: true - pull_request: true + workflow_dispatch: + pull_request: env: NAMESPACE: cisco From 67ef0f1681d5b00ad88a7155c11776aa6fbcf3ab Mon Sep 17 00:00:00 2001 From: Rugvedi Kapse Date: Mon, 11 Nov 2024 13:59:53 -0800 Subject: [PATCH 48/83] trigger circleci on pr creation instead of pr approval --- .github/workflows/main.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 504ed0950..df7944210 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -1,10 +1,11 @@ +--- name: main on: pull_request_review: types: [submitted] jobs: trigger-circleci: - if: github.event.review.state == 'approved' + # if: github.event.review.state == 'approved' runs-on: ubuntu-20.04 steps: - run: echo -n "${GITHUB_REF}" | sed -r 's/^refs\///' | sed -r 's/merge/head/' > github_ref From 88d0a8321ca35eba2f5791510ab02a4bc2bda450 Mon Sep 17 00:00:00 2001 From: Rugvedi Kapse Date: Mon, 11 Nov 2024 14:04:40 -0800 Subject: [PATCH 49/83] trigger circleci on pr creation instead of pr approval --- .github/workflows/main.yml | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index df7944210..cd8d17464 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -1,8 +1,11 @@ --- name: main + on: - pull_request_review: - types: [submitted] + pull_request: + # pull_request_review: + # types: [submitted] + jobs: trigger-circleci: # if: github.event.review.state == 'approved' From 5dd1cdba18d2988be4e6ad92295e6e4f89550149 Mon Sep 17 00:00:00 2001 From: Rugvedi Kapse Date: Mon, 11 Nov 2024 14:40:45 -0800 Subject: [PATCH 50/83] trigger circleci on pr creation instead of pr approval --- .github/workflows/main.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index cd8d17464..20e07e4a4 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -8,7 +8,7 @@ on: jobs: trigger-circleci: - # if: github.event.review.state == 'approved' + if: github.event.review.state == 'approved' runs-on: ubuntu-20.04 steps: - run: echo -n "${GITHUB_REF}" | sed -r 's/^refs\///' | sed -r 's/merge/head/' > github_ref From 4eae795f0716a4e3ecbe95e3133b5b0a0deb3b06 Mon Sep 17 00:00:00 2001 From: Rugvedi Kapse Date: Mon, 11 Nov 2024 14:49:25 -0800 Subject: [PATCH 51/83] test ci --- .../ccc_site_management/vars/vars_site_management.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/integration/ccc_site_management/vars/vars_site_management.yml b/tests/integration/ccc_site_management/vars/vars_site_management.yml index 96eb82e44..5a9aedfb3 100644 --- a/tests/integration/ccc_site_management/vars/vars_site_management.yml +++ b/tests/integration/ccc_site_management/vars/vars_site_management.yml @@ -37,7 +37,6 @@ design_sites: parent_name: Global/USA-Test site_type: area - - site: building: name: BLD10 From 070a958a8b906877b55ee8bb444c00e5685e53d4 Mon Sep 17 00:00:00 2001 From: Rugvedi Kapse Date: Mon, 11 Nov 2024 14:51:44 -0800 Subject: [PATCH 52/83] test ci --- .github/workflows/main.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 20e07e4a4..cd8d17464 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -8,7 +8,7 @@ on: jobs: 
trigger-circleci: - if: github.event.review.state == 'approved' + # if: github.event.review.state == 'approved' runs-on: ubuntu-20.04 steps: - run: echo -n "${GITHUB_REF}" | sed -r 's/^refs\///' | sed -r 's/merge/head/' > github_ref From 6f4112cc8a6a01a07edb9f12aeac3051a5c7c425 Mon Sep 17 00:00:00 2001 From: Rugvedi Kapse Date: Mon, 11 Nov 2024 19:36:46 -0800 Subject: [PATCH 53/83] test ci --- .circleci/config.yml | 38 ++++++++++--------- .../vars/vars_site_management.yml | 2 + 2 files changed, 22 insertions(+), 18 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 7a7969ad1..6932f0dc3 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -335,24 +335,26 @@ jobs: export ANSIBLE_ROLES_PATH=$PWD/tests/integration cat ccc_roles.yml echo $(circleci tests glob "tests/integration/*") - echo -n 0 > rc - cat ccc_roles.yml | circleci tests run --command "xargs ./run_tests.sh" --split-by=timings --timings-type=name || echo -n $? > rc - echo ${LOGS_SSH_KEY} | base64 -d > ~/id_ssh - chmod 600 ~/id_ssh - export NO_PROXY="" - export no_proxy="" - ssh $LOGS_MACHINE \ - -o StrictHostKeyChecking=no \ - -o UserKnownHostsFile=/dev/null \ - -i ~/id_ssh \ - mkdir -p /var/www/html/$CIRCLE_PROJECT_USERNAME/$CIRCLE_PROJECT_REPONAME/$CIRCLE_BUILD_NUM - scp \ - -i ~/id_ssh \ - -o StrictHostKeyChecking=no \ - -o UserKnownHostsFile=/dev/null \ - sanity_tests_logs_$CIRCLE_NODE_INDEX.log $LOGS_MACHINE:/var/www/html/$CIRCLE_PROJECT_USERNAME/$CIRCLE_PROJECT_REPONAME/$CIRCLE_BUILD_NUM - echo "LOGS URL: http://10.195.243.37/$CIRCLE_PROJECT_USERNAME/$CIRCLE_PROJECT_REPONAME/$CIRCLE_BUILD_NUM/sanity_tests_logs_$CIRCLE_NODE_INDEX.log" - if [[ $(cat rc) != "0" ]]; then exit 1; fi + cat ccc_roles.yml | circleci tests run --command "xargs ./run_tests.sh" --split-by=timings --timings-type=name + + # echo -n 0 > rc + # cat ccc_roles.yml | circleci tests run --command "xargs ./run_tests.sh" --split-by=timings --timings-type=name || echo -n $? 
> rc + # echo ${LOGS_SSH_KEY} | base64 -d > ~/id_ssh + # chmod 600 ~/id_ssh + # export NO_PROXY="" + # export no_proxy="" + # ssh $LOGS_MACHINE \ + # -o StrictHostKeyChecking=no \ + # -o UserKnownHostsFile=/dev/null \ + # -i ~/id_ssh \ + # mkdir -p /var/www/html/$CIRCLE_PROJECT_USERNAME/$CIRCLE_PROJECT_REPONAME/$CIRCLE_BUILD_NUM + # scp \ + # -i ~/id_ssh \ + # -o StrictHostKeyChecking=no \ + # -o UserKnownHostsFile=/dev/null \ + # sanity_tests_logs_$CIRCLE_NODE_INDEX.log $LOGS_MACHINE:/var/www/html/$CIRCLE_PROJECT_USERNAME/$CIRCLE_PROJECT_REPONAME/$CIRCLE_BUILD_NUM + # echo "LOGS URL: http://10.195.243.37/$CIRCLE_PROJECT_USERNAME/$CIRCLE_PROJECT_REPONAME/$CIRCLE_BUILD_NUM/sanity_tests_logs_$CIRCLE_NODE_INDEX.log" + # if [[ $(cat rc) != "0" ]]; then exit 1; fi no_output_timeout: 120m diff --git a/tests/integration/ccc_site_management/vars/vars_site_management.yml b/tests/integration/ccc_site_management/vars/vars_site_management.yml index 5a9aedfb3..cffbee2b9 100644 --- a/tests/integration/ccc_site_management/vars/vars_site_management.yml +++ b/tests/integration/ccc_site_management/vars/vars_site_management.yml @@ -37,6 +37,8 @@ design_sites: parent_name: Global/USA-Test site_type: area + + - site: building: name: BLD10 From 413cb8882e443229a163d02b6270c39ea7844119 Mon Sep 17 00:00:00 2001 From: Rugvedi Kapse Date: Mon, 11 Nov 2024 19:50:09 -0800 Subject: [PATCH 54/83] test ci --- test-requirements.txt | 2 +- .../ccc_site_management/vars/vars_site_management.yml | 2 -- 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/test-requirements.txt b/test-requirements.txt index 22f9023e0..0ee5e3754 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -4,4 +4,4 @@ PyYAML==6.0.1 cryptography==42.0.7 paramiko==3.4.0 pyzipper==0.3.6 -ansible==6.7.0 \ No newline at end of file +ansible==8.7.0 \ No newline at end of file diff --git a/tests/integration/ccc_site_management/vars/vars_site_management.yml b/tests/integration/ccc_site_management/vars/vars_site_management.yml index cffbee2b9..5a9aedfb3 100644 --- a/tests/integration/ccc_site_management/vars/vars_site_management.yml +++ b/tests/integration/ccc_site_management/vars/vars_site_management.yml @@ -37,8 +37,6 @@ design_sites: parent_name: Global/USA-Test site_type: area - - - site: building: name: BLD10 From 9e6e5bf8c9b3fcdc8694cf63db4841c2f9b08a06 Mon Sep 17 00:00:00 2001 From: Rugvedi Kapse Date: Mon, 11 Nov 2024 19:58:08 -0800 Subject: [PATCH 55/83] test ci --- .circleci/config.yml | 2 ++ test-requirements.txt | 3 +-- .../ccc_site_management/vars/vars_site_management.yml | 2 ++ 3 files changed, 5 insertions(+), 2 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 6932f0dc3..b6799590c 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -126,6 +126,7 @@ jobs: # Install ansible, dnacentersdk pip install --upgrade pip pip install -r test-requirements.txt + pip install ansible # Build collection and store resulting tarball in directory $HOME/.cache/v<< pipeline.parameters.ansible_cisco_dnac_version >>/collection-tarballs ansible-galaxy collection build --force --output-path workspace/ - save_cache: @@ -313,6 +314,7 @@ jobs: # Install ansible, dnacentersdk pip install --upgrade pip pip install -r test-requirements.txt + pip install ansible ansible --version - run: name: Install the collection tarball diff --git a/test-requirements.txt b/test-requirements.txt index 0ee5e3754..4ddefd14d 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -3,5 +3,4 @@ Jinja2==3.1.4 PyYAML==6.0.1 
cryptography==42.0.7 paramiko==3.4.0 -pyzipper==0.3.6 -ansible==8.7.0 \ No newline at end of file +pyzipper==0.3.6 \ No newline at end of file diff --git a/tests/integration/ccc_site_management/vars/vars_site_management.yml b/tests/integration/ccc_site_management/vars/vars_site_management.yml index 5a9aedfb3..cffbee2b9 100644 --- a/tests/integration/ccc_site_management/vars/vars_site_management.yml +++ b/tests/integration/ccc_site_management/vars/vars_site_management.yml @@ -37,6 +37,8 @@ design_sites: parent_name: Global/USA-Test site_type: area + + - site: building: name: BLD10 From 396651fb6623411e6472979f07636019ad936c12 Mon Sep 17 00:00:00 2001 From: Rugvedi Kapse Date: Mon, 11 Nov 2024 20:32:29 -0800 Subject: [PATCH 56/83] test ci --- meta/runtime.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/meta/runtime.yml b/meta/runtime.yml index 898ad8ff5..873e7979a 100644 --- a/meta/runtime.yml +++ b/meta/runtime.yml @@ -1,2 +1,2 @@ --- -requires_ansible: '>=2.15.0' +requires_ansible: '>=2.13.0' From 637f00b3918e8f517b1e0f6cd92e697863dc2ba0 Mon Sep 17 00:00:00 2001 From: Megha Kandari Date: Tue, 12 Nov 2024 10:41:42 +0530 Subject: [PATCH 57/83] bug fix for banner settings --- .../network_settings_workflow_manager.py | 34 ++++++++----------- 1 file changed, 15 insertions(+), 19 deletions(-) diff --git a/plugins/modules/network_settings_workflow_manager.py b/plugins/modules/network_settings_workflow_manager.py index 20390e7cd..061c209c3 100644 --- a/plugins/modules/network_settings_workflow_manager.py +++ b/plugins/modules/network_settings_workflow_manager.py @@ -827,13 +827,16 @@ def validate_input(self): for pool in ip_pool: # Check for 'dhcp_server_ips' - if not isinstance(pool["dhcp_server_ips"], list): + dhcp_server_ips = pool.get("dhcp_server_ips") + if dhcp_server_ips is not None and not isinstance(dhcp_server_ips, list): invalid_params_type.append("'dhcp_server_ips' should be a list.") - + # Check for 'dns_server_ips' - if not isinstance(pool["dns_server_ips"], list): + dns_server_ips = pool.get("dns_server_ips") + if dns_server_ips is not None and not isinstance(dns_server_ips, list): invalid_params_type.append("'dns_server_ips' should be a list.") + if invalid_params_type: self.msg = "Invalid required parameter(s): {0}".format(', '.join(invalid_params_type)) self.result['response'] = self.msg @@ -2834,11 +2837,11 @@ def get_want_network(self, network_management_details): if retain_existing_banner is not None: if retain_existing_banner is True: want_network_settings.get("messageOfTheday").update({ - "type": "Custom" + "type": "Builtin" }) else: want_network_settings.get("messageOfTheday").update({ - "type": "Builtin" + "type": "Custom" }) else: del want_network_settings["messageOfTheday"] @@ -3588,36 +3591,31 @@ def update_network(self, network_management): dhcp_settings = net_params.get("settings").get("dhcpServer") response = self.update_dhcp_settings_for_site(site_name, site_id, dhcp_settings) self.log("Received API response of 'set_dhcp_settings_for_a_site': {0}".format(response), "DEBUG") - validation_string = "desired common settings operation successful" - self.check_task_response_status(response, validation_string, "set_dhcp_settings_for_a_site").check_return_status() + self.check_tasks_response_status(response, "set_dhcp_settings_for_a_site").check_return_status() if net_params.get("settings").get("ntpServer"): ntp_settings = net_params.get("settings").get("ntpServer") response = self.update_ntp_settings_for_site(site_name, site_id, ntp_settings) 
self.log("Received API response of 'set_n_t_p_settings_for_a_site': {0}".format(response), "DEBUG") - validation_string = "desired common settings operation successful" - self.check_task_response_status(response, validation_string, "set_n_t_p_settings_for_a_site").check_return_status() + self.check_tasks_response_status(response, "set_n_t_p_settings_for_a_site").check_return_status() if net_params.get("settings").get("timezone"): time_zone_settings = net_params.get("settings").get("timezone") response = self.update_time_zone_settings_for_site(site_name, site_id, time_zone_settings) self.log("Received API response of 'set_time_zone_for_a_site': {0}".format(response), "DEBUG") - validation_string = "desired common settings operation successful" - self.check_task_response_status(response, validation_string, "set_time_zone_for_a_site").check_return_status() + self.check_tasks_response_status(response, "set_time_zone_for_a_site").check_return_status() if net_params.get("settings").get("dnsServer"): dns_settings = net_params.get("settings").get("dnsServer") response = self.update_dns_settings_for_site(site_name, site_id, dns_settings) self.log("Received API response of 'set_d_n_s_settings_for_a_site': {0}".format(response), "DEBUG") - validation_string = "desired common settings operation successful" - self.check_task_response_status(response, validation_string, "set_d_n_s_settings_for_a_site").check_return_status() + self.check_tasks_response_status(response, "set_d_n_s_settings_for_a_site").check_return_status() if net_params.get("settings").get("messageOfTheday"): banner_settings = net_params.get("settings").get("messageOfTheday") response = self.update_banner_settings_for_site(site_name, site_id, banner_settings) self.log("Received API response of 'set_banner_settings_for_a_site': {0}".format(response), "DEBUG") - validation_string = "desired common settings operation successful" - self.check_task_response_status(response, validation_string, "set_banner_settings_for_a_site").check_return_status() + self.check_tasks_response_status(response, "set_banner_settings_for_a_site").check_return_status() if all([ net_params.get("settings", {}).get("snmpServer"), @@ -3635,16 +3633,14 @@ def update_network(self, network_management): } response = self.update_telemetry_settings_for_site(site_name, site_id, telemetry_settings) self.log("Received API response of 'set_telemetry_settings_for_a_site': {0}".format(response), "DEBUG") - validation_string = "desired common settings operation successful" - self.check_task_response_status(response, validation_string, "set_telemetry_settings_for_a_site").check_return_status() + self.check_tasks_response_status(response, "set_telemetry_settings_for_a_site").check_return_status() if net_params.get("settings").get("network_aaa") or net_params.get("settings").get("client_and_endpoint_aaa"): network_aaa = net_params.get("settings").get("network_aaa") client_and_endpoint_aaa = net_params.get("settings").get("client_and_endpoint_aaa") response = self.update_aaa_settings_for_site(site_name, site_id, network_aaa, client_and_endpoint_aaa) self.log("Received API response of 'set_a_a_a_settings_for_a_site': {0}".format(response), "DEBUG") - validation_string = "desired common settings operation successful" - self.check_task_response_status(response, validation_string, "set_a_a_a_settings_for_a_site").check_return_status() + self.check_tasks_response_status(response, "set_a_a_a_settings_for_a_site").check_return_status() self.log("Network under the site '{0}' has been changed 
successfully".format(site_name), "INFO") result_network.get("msg") \ From e0493d0ee2b409c7bb3b0d0f392820605e1dbeb4 Mon Sep 17 00:00:00 2001 From: Syed-khadeerahmed Date: Tue, 12 Nov 2024 11:23:45 +0530 Subject: [PATCH 58/83] review comments addressed --- plugins/modules/inventory_workflow_manager.py | 26 +++++-------------- 1 file changed, 6 insertions(+), 20 deletions(-) diff --git a/plugins/modules/inventory_workflow_manager.py b/plugins/modules/inventory_workflow_manager.py index 50680ade6..20d8b4454 100644 --- a/plugins/modules/inventory_workflow_manager.py +++ b/plugins/modules/inventory_workflow_manager.py @@ -1791,46 +1791,32 @@ def wait_for_device_managed_state(self, device_ip, retry_count, retry_interval): self.log("Device '{0}' did not transition to the Managed state within the retry limit.".format(device_ip), "WARNING") return False - def provision_wired_device_v1(self, device_ip, site_name, device_type): + def provision_wired_device_v1(self, device_ip, site_name_hierarchy, device_type): """ Provisions a device for versions <= 2.3.5.6. Parameters: device_ip (str): The IP address of the device to provision. - site_name (str): The name of the site where the device will be provisioned. + site_name_hierarchy (str): The name of the site where the device will be provisioned. device_type (str): The type of device being provisioned. - Returns: - self (object): An instance of the class after the provision operation is performed. Description: This method provisions a device with the specified IP address, site name, and device type for software versions 2.3.5.6 or earlier. It handles the necessary configurations and returns a success status. """ - provision_params = {'deviceManagementIpAddress': device_ip, 'siteNameHierarchy': site_name} + provision_params = {'deviceManagementIpAddress': device_ip, 'siteNameHierarchy': site_name_hierarchy} try: response = self.dnac._exec(family="sda", function='provision_wired_device', op_modifies=True, params=provision_params) self.log("Received API response from 'provision_wired_device': {0}".format(response), "DEBUG") if response: - exec_id = response.get("executionId") - response = self.get_execution_details(exec_id) - while True: - if response.get("status") == "SUCCESS": - self.log("Device: {0} successfully provisioned to the site {1}".format(device_ip, site_name), "INFO") - self.provision_count += 1 - self.provisioned_device.append(device_ip) - break - elif response.get("status") == "FAILURE": - self.log("Failed to provision device: {0}".format(device_ip), "ERROR") - raise Exception - else: - self.log("Provisioning in progress for device: {0}".format(device_ip), "DEBUG") + self.check_execution_response_status(response, "provision_wired_device").check_return_status() + self.provision_count += 1 + self.provisioned_device.append(device_ip) except Exception as e: self.handle_provisioning_exception(device_ip, e, device_type) - return self - def provision_wired_device_v2(self, device_ip, site_name): """ Provisions a device for versions > 2.3.5.6. 
From 57bd5846cc2f090fabc11a2db2fd52527a127b6c Mon Sep 17 00:00:00 2001 From: Syed-khadeerahmed Date: Tue, 12 Nov 2024 11:47:22 +0530 Subject: [PATCH 59/83] review comments addressed --- plugins/modules/inventory_workflow_manager.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/plugins/modules/inventory_workflow_manager.py b/plugins/modules/inventory_workflow_manager.py index 20d8b4454..3a49eac70 100644 --- a/plugins/modules/inventory_workflow_manager.py +++ b/plugins/modules/inventory_workflow_manager.py @@ -1700,7 +1700,7 @@ def provisioned_wired_device(self): for device_info in provision_wired_list: device_ip = device_info['device_ip'] - site_name = device_info['site_name'] + site_name_hierarchy = device_info['site_name'] device_ip_list.append(device_ip) device_type = "Wired" resync_retry_count = device_info.get("resync_retry_count", 200) @@ -1726,9 +1726,9 @@ def provisioned_wired_device(self): continue if self.get_ccc_version_as_integer() <= self.get_ccc_version_as_int_from_str("2.3.5.3"): - self.provision_wired_device_v1(device_ip, site_name, device_type) + self.provision_wired_device_v1(device_ip, site_name_hierarchy, device_type) else: - self.provision_wired_device_v2(device_ip, site_name) + self.provision_wired_device_v2(device_ip, site_name_hierarchy) # Handle final provisioning results self.handle_final_provisioning_result(total_devices, self.provision_count, self.already_provisioned_count, device_ip_list, device_type) @@ -1817,7 +1817,7 @@ def provision_wired_device_v1(self, device_ip, site_name_hierarchy, device_type) except Exception as e: self.handle_provisioning_exception(device_ip, e, device_type) - def provision_wired_device_v2(self, device_ip, site_name): + def provision_wired_device_v2(self, device_ip, site_name_hierarchy): """ Provisions a device for versions > 2.3.5.6. Parameters: @@ -1829,7 +1829,7 @@ def provision_wired_device_v2(self, device_ip, site_name): It performs the necessary configurations and returns a success status. 
""" try: - site_exist, site_id = self.get_site_id(site_name) + site_exist, site_id = self.get_site_id(site_name_hierarchy) device_ids = self.get_device_ids([device_ip]) device_id = device_ids[0] @@ -1839,7 +1839,7 @@ def provision_wired_device_v2(self, device_ip, site_name): is_device_assigned_to_site = self.is_device_assigned_to_site(device_id) if not is_device_assigned_to_site: - self.assign_device_to_site(device_ids, site_name, site_id) + self.assign_device_to_site(device_ids, site_name_hierarchy, site_id) if not is_device_provisioned: self.provision_device(provision_params, device_ip) From e61ee58b284268e4a8531d7e120926edf09cab96 Mon Sep 17 00:00:00 2001 From: Megha Kandari Date: Tue, 12 Nov 2024 12:14:45 +0530 Subject: [PATCH 60/83] bug fix for banner settings --- plugins/modules/network_settings_workflow_manager.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/plugins/modules/network_settings_workflow_manager.py b/plugins/modules/network_settings_workflow_manager.py index 061c209c3..23ffd4e2a 100644 --- a/plugins/modules/network_settings_workflow_manager.py +++ b/plugins/modules/network_settings_workflow_manager.py @@ -2828,11 +2828,6 @@ def get_want_network(self, network_management_details): message_of_the_day = item.get("message_of_the_day") if message_of_the_day is not None: - if message_of_the_day.get("banner_message") is not None: - want_network_settings.get("messageOfTheday").update({ - "message": - message_of_the_day.get("banner_message") - }) retain_existing_banner = message_of_the_day.get("retain_existing_banner") if retain_existing_banner is not None: if retain_existing_banner is True: @@ -2843,6 +2838,11 @@ def get_want_network(self, network_management_details): want_network_settings.get("messageOfTheday").update({ "type": "Custom" }) + if message_of_the_day.get("banner_message") is not None: + want_network_settings.get("messageOfTheday").update({ + "message": + message_of_the_day.get("banner_message") + }) else: del want_network_settings["messageOfTheday"] From 1f85358f18096acdeb9ba7ed373c8fbd1ba2ca35 Mon Sep 17 00:00:00 2001 From: Syed-khadeerahmed Date: Tue, 12 Nov 2024 12:47:19 +0530 Subject: [PATCH 61/83] minor change done --- plugins/modules/swim_workflow_manager.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/plugins/modules/swim_workflow_manager.py b/plugins/modules/swim_workflow_manager.py index ce04dda10..fa997883e 100644 --- a/plugins/modules/swim_workflow_manager.py +++ b/plugins/modules/swim_workflow_manager.py @@ -1609,9 +1609,6 @@ def get_diff_tagging(self): self.log(self.msg, "INFO") return self else: - self.log("inside else for already tagged") - self.log(len(already_un_tagged_device_role)) - self.log(len(device_role_no)) if len(already_un_tagged_device_role) == len(device_role_no): self.log("inside logic") self.status = "success" From 0977146dc75870479d1bba247315018dc0939dc2 Mon Sep 17 00:00:00 2001 From: Syed-khadeerahmed Date: Tue, 12 Nov 2024 13:12:41 +0530 Subject: [PATCH 62/83] document fixed --- plugins/modules/provision_workflow_manager.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/plugins/modules/provision_workflow_manager.py b/plugins/modules/provision_workflow_manager.py index f7a96db6b..5e68d55c8 100644 --- a/plugins/modules/provision_workflow_manager.py +++ b/plugins/modules/provision_workflow_manager.py @@ -95,10 +95,10 @@ elements: dict suboptions: interface_name: - description: The name of the interface. 
(Required parameter for Cisco Catalyst Version - 2.3.7.6) + description: The name of the interface. type: str vlan_id: - description: The VLAN ID associated with the interface. (Required parameter for Cisco Catalyst Version - 2.3.7.6) + description: The VLAN ID associated with the interface. type: str interface_ip_address: description: The IP address assigned to the interface. From 8715ff01278d2de1536da4aa3bbbae89c4fb78ce Mon Sep 17 00:00:00 2001 From: Megha Kandari Date: Tue, 12 Nov 2024 13:16:50 +0530 Subject: [PATCH 63/83] bug fix for banner settings --- plugins/modules/network_settings_workflow_manager.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/plugins/modules/network_settings_workflow_manager.py b/plugins/modules/network_settings_workflow_manager.py index 23ffd4e2a..96d56e283 100644 --- a/plugins/modules/network_settings_workflow_manager.py +++ b/plugins/modules/network_settings_workflow_manager.py @@ -830,13 +830,12 @@ def validate_input(self): dhcp_server_ips = pool.get("dhcp_server_ips") if dhcp_server_ips is not None and not isinstance(dhcp_server_ips, list): invalid_params_type.append("'dhcp_server_ips' should be a list.") - + # Check for 'dns_server_ips' dns_server_ips = pool.get("dns_server_ips") if dns_server_ips is not None and not isinstance(dns_server_ips, list): invalid_params_type.append("'dns_server_ips' should be a list.") - if invalid_params_type: self.msg = "Invalid required parameter(s): {0}".format(', '.join(invalid_params_type)) self.result['response'] = self.msg From 6f51099e5dbf1c58b64e5742da4d139bf9b8bf23 Mon Sep 17 00:00:00 2001 From: Rugvedi Kapse Date: Tue, 12 Nov 2024 01:38:09 -0800 Subject: [PATCH 64/83] test ci --- .circleci/config.yml | 159 +++++++++++++++++++++--------------------- meta/runtime.yml | 2 +- test-requirements.txt | 3 +- 3 files changed, 82 insertions(+), 82 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index b6799590c..8fe69f5ed 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -126,7 +126,6 @@ jobs: # Install ansible, dnacentersdk pip install --upgrade pip pip install -r test-requirements.txt - pip install ansible # Build collection and store resulting tarball in directory $HOME/.cache/v<< pipeline.parameters.ansible_cisco_dnac_version >>/collection-tarballs ansible-galaxy collection build --force --output-path workspace/ - save_cache: @@ -314,7 +313,6 @@ jobs: # Install ansible, dnacentersdk pip install --upgrade pip pip install -r test-requirements.txt - pip install ansible ansible --version - run: name: Install the collection tarball @@ -337,6 +335,7 @@ jobs: export ANSIBLE_ROLES_PATH=$PWD/tests/integration cat ccc_roles.yml echo $(circleci tests glob "tests/integration/*") + env cat ccc_roles.yml | circleci tests run --command "xargs ./run_tests.sh" --split-by=timings --timings-type=name # echo -n 0 > rc @@ -372,43 +371,43 @@ jobs: command: | python ${HOME}/static/pnp_script.py #TODO - main-pr: - docker: - - image: maniator/gh:v2.49.2 - resource_class: cisco-en-programmability/catalyst-center-ansible-runner-main - steps: - - run: - name: Clone repo to workspace - command: git clone --depth=1 -b $CIRCLE_BRANCH https://github.com/$CIRCLE_PROJECT_USERNAME/$CIRCLE_PROJECT_REPONAME.git . 
- - run: - name: Create release pr to main - command: | - gh repo set-default $CIRCLE_PROJECT_USERNAME/$CIRCLE_PROJECT_REPONAME - gh pr create \ - --base main \ - --head $CIRCLE_BRANCH \ - --title "Release v<< pipeline.parameters.ansible_cisco_dnac_version >>" \ - --body "Release v<< pipeline.parameters.ansible_cisco_dnac_version >>" - - release-job: - docker: - - image: python:3.8.10 - resource_class: cisco-en-programmability/catalyst-center-ansible-runner-main - steps: - - run: - name: Clone repo to workspace - command: | - git clone --depth=1 https://github.com/$CIRCLE_PROJECT_USERNAME/$CIRCLE_PROJECT_REPONAME.git . - git fetch origin $CIRCLE_BRANCH:work - git checkout work - - restore_cache: - keys: - - collection-<< pipeline.git.revision >> - # - add_ssh_keys: - # fingerprints: - # - "KEY_FINGERPRINT" - - run: - echo create release + # main-pr: + # docker: + # - image: maniator/gh:v2.49.2 + # resource_class: cisco-en-programmability/catalyst-center-ansible-runner-main + # steps: + # - run: + # name: Clone repo to workspace + # command: git clone --depth=1 -b $CIRCLE_BRANCH https://github.com/$CIRCLE_PROJECT_USERNAME/$CIRCLE_PROJECT_REPONAME.git . + # - run: + # name: Create release pr to main + # command: | + # gh repo set-default $CIRCLE_PROJECT_USERNAME/$CIRCLE_PROJECT_REPONAME + # gh pr create \ + # --base main \ + # --head $CIRCLE_BRANCH \ + # --title "Release v<< pipeline.parameters.ansible_cisco_dnac_version >>" \ + # --body "Release v<< pipeline.parameters.ansible_cisco_dnac_version >>" + + # release-job: + # docker: + # - image: python:3.8.10 + # resource_class: cisco-en-programmability/catalyst-center-ansible-runner-main + # steps: + # - run: + # name: Clone repo to workspace + # command: | + # git clone --depth=1 https://github.com/$CIRCLE_PROJECT_USERNAME/$CIRCLE_PROJECT_REPONAME.git . 
+ # git fetch origin $CIRCLE_BRANCH:work + # git checkout work + # - restore_cache: + # keys: + # - collection-<< pipeline.git.revision >> + # # - add_ssh_keys: + # # fingerprints: + # # - "KEY_FINGERPRINT" + # - run: + # echo create release # git tag \ # -a v<< pipeline.parameters.ansible_cisco_dnac_version >> \ # -m "Ansible DNACCollection Version v<< pipeline.parameters.ansible_cisco_dnac_version >>" @@ -487,49 +486,49 @@ workflows: - addrole context: - dnac-servers - - logs-vm + # - logs-vm # - post_pnp_testing: # requires: # - sanity-tests - release-candidate: - when: - or: - - equal: [run-release-prep, << pipeline.parameters.GHA_Meta >>] - jobs: - - build - - addrole: - matrix: - parameters: - run-all: - - true - - sanity-tests: - requires: - - addrole - - build - context: - - dnac-servers - - hold: - type: approval - requires: - - sanity-tests - - - main-pr: - context: - - gh-token - requires: - - hold - - release: - when: - or: - - equal: [run-release, << pipeline.parameters.GHA_Meta >>] - jobs: - - build - - release-job: - requires: - - build - context: - - gh-token - - galaxy-token + # release-candidate: + # when: + # or: + # - equal: [run-release-prep, << pipeline.parameters.GHA_Meta >>] + # jobs: + # - build + # - addrole: + # matrix: + # parameters: + # run-all: + # - true + # - sanity-tests: + # requires: + # - addrole + # - build + # context: + # - dnac-servers + # - hold: + # type: approval + # requires: + # - sanity-tests + + # - main-pr: + # context: + # - gh-token + # requires: + # - hold + + # release: + # when: + # or: + # - equal: [run-release, << pipeline.parameters.GHA_Meta >>] + # jobs: + # - build + # - release-job: + # requires: + # - build + # context: + # - gh-token + # - galaxy-token diff --git a/meta/runtime.yml b/meta/runtime.yml index 873e7979a..898ad8ff5 100644 --- a/meta/runtime.yml +++ b/meta/runtime.yml @@ -1,2 +1,2 @@ --- -requires_ansible: '>=2.13.0' +requires_ansible: '>=2.15.0' diff --git a/test-requirements.txt b/test-requirements.txt index 4ddefd14d..22f9023e0 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -3,4 +3,5 @@ Jinja2==3.1.4 PyYAML==6.0.1 cryptography==42.0.7 paramiko==3.4.0 -pyzipper==0.3.6 \ No newline at end of file +pyzipper==0.3.6 +ansible==6.7.0 \ No newline at end of file From 74215b896736331e52a6f82c20f0d85b45432950 Mon Sep 17 00:00:00 2001 From: Rugvedi Kapse Date: Tue, 12 Nov 2024 01:53:53 -0800 Subject: [PATCH 65/83] test ci --- .../ccc_site_management/vars/vars_site_management.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/integration/ccc_site_management/vars/vars_site_management.yml b/tests/integration/ccc_site_management/vars/vars_site_management.yml index cffbee2b9..96eb82e44 100644 --- a/tests/integration/ccc_site_management/vars/vars_site_management.yml +++ b/tests/integration/ccc_site_management/vars/vars_site_management.yml @@ -38,7 +38,6 @@ design_sites: site_type: area - - site: building: name: BLD10 From cd31ddf348b9da5d5486060b545aafea05c2dcfe Mon Sep 17 00:00:00 2001 From: Rugvedi Kapse Date: Tue, 12 Nov 2024 02:01:48 -0800 Subject: [PATCH 66/83] test ci --- .../ccc_site_management/vars/vars_site_management.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tests/integration/ccc_site_management/vars/vars_site_management.yml b/tests/integration/ccc_site_management/vars/vars_site_management.yml index 96eb82e44..ab1c5b60e 100644 --- a/tests/integration/ccc_site_management/vars/vars_site_management.yml +++ b/tests/integration/ccc_site_management/vars/vars_site_management.yml @@ -38,6 
+38,8 @@ design_sites: site_type: area + + - site: building: name: BLD10 From a22f6275b2344d9391df13543fce4125a75ef9ab Mon Sep 17 00:00:00 2001 From: Rugvedi Kapse Date: Tue, 12 Nov 2024 02:24:35 -0800 Subject: [PATCH 67/83] test ci --- .circleci/config.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.circleci/config.yml b/.circleci/config.yml index 8fe69f5ed..c1352eb60 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -336,6 +336,7 @@ jobs: cat ccc_roles.yml echo $(circleci tests glob "tests/integration/*") env + cat run_tests.sh cat ccc_roles.yml | circleci tests run --command "xargs ./run_tests.sh" --split-by=timings --timings-type=name # echo -n 0 > rc From 47846b30b04ff1e5d89f8112f876fe6869b0a6ea Mon Sep 17 00:00:00 2001 From: Rugvedi Kapse Date: Tue, 12 Nov 2024 02:51:15 -0800 Subject: [PATCH 68/83] test ci --- .circleci/config.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.circleci/config.yml b/.circleci/config.yml index c1352eb60..3e0d25e64 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -337,6 +337,7 @@ jobs: echo $(circleci tests glob "tests/integration/*") env cat run_tests.sh + find / -name run_tests.sh cat ccc_roles.yml | circleci tests run --command "xargs ./run_tests.sh" --split-by=timings --timings-type=name # echo -n 0 > rc From caa6dd57e25be60e8f7addeeeeb34b55eb060dba Mon Sep 17 00:00:00 2001 From: Rugvedi Kapse Date: Tue, 12 Nov 2024 02:59:31 -0800 Subject: [PATCH 69/83] test ci --- run_tests.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/run_tests.sh b/run_tests.sh index cb3041695..644a6292f 100755 --- a/run_tests.sh +++ b/run_tests.sh @@ -21,4 +21,4 @@ do echo " - $role" >> ccc_test_roles.yml done -ansible-playbook -i hosts ccc_test_roles.yml > "sanity_tests_logs_$CIRCLE_NODE_INDEX.log" +ansible-playbook -i hosts ccc_test_roles.yml From 17217241235325333b5284ef6866b11d25d7931d Mon Sep 17 00:00:00 2001 From: Rugvedi Kapse Date: Tue, 12 Nov 2024 03:07:03 -0800 Subject: [PATCH 70/83] test ci --- .../ccc_site_management/vars/vars_site_management.yml | 2 -- 1 file changed, 2 deletions(-) diff --git a/tests/integration/ccc_site_management/vars/vars_site_management.yml b/tests/integration/ccc_site_management/vars/vars_site_management.yml index ab1c5b60e..96eb82e44 100644 --- a/tests/integration/ccc_site_management/vars/vars_site_management.yml +++ b/tests/integration/ccc_site_management/vars/vars_site_management.yml @@ -38,8 +38,6 @@ design_sites: site_type: area - - - site: building: name: BLD10 From 3f9ffe3128b6825012e14217cd435c85131179c4 Mon Sep 17 00:00:00 2001 From: Rugvedi Kapse Date: Tue, 12 Nov 2024 03:13:49 -0800 Subject: [PATCH 71/83] test ci --- .github/workflows/sanity_tests_devel.yml | 28 ++++++++++++++++-------- 1 file changed, 19 insertions(+), 9 deletions(-) diff --git a/.github/workflows/sanity_tests_devel.yml b/.github/workflows/sanity_tests_devel.yml index 4ef7a822c..3c80be105 100644 --- a/.github/workflows/sanity_tests_devel.yml +++ b/.github/workflows/sanity_tests_devel.yml @@ -62,14 +62,24 @@ jobs: - name: Install yamllint run: pip install --user yamllint - - name: Run yamllint - run: | - if [ -s changed_files.txt ]; then - changed_files=$(cat changed_files.txt) - for file in $changed_files; do - yamllint -c .yamllint.yml $file - done + - name: Run yamllint + run: | + if [ -s changed_files.txt ]; then + changed_files=$(cat changed_files.txt) + lintable_files="" + for file in $changed_files; do + # Check if the file belongs to the plugins/modules or playbooks directory + if [[ $file == 
plugins/modules/* || $file == playbooks/* ]]; then + lintable_files="$lintable_files $file" + fi + done + + if [ -n "$lintable_files" ]; then + yamllint -c .yamllint.yml $lintable_files else - echo "No changed files to lint." + echo "No relevant files to lint." fi - working-directory: ./ansible_collections/${{env.NAMESPACE}}/${{env.COLLECTION_NAME}} + else + echo "No changed files to lint." + fi + working-directory: ./ansible_collections/${{env.NAMESPACE}}/${{env.COLLECTION_NAME}} \ No newline at end of file From 386ad7ce5a78ecfd826d7e49190e07db609472e3 Mon Sep 17 00:00:00 2001 From: Syed-khadeerahmed Date: Tue, 12 Nov 2024 23:04:07 +0530 Subject: [PATCH 72/83] coding done --- playbooks/swim_workflow_manager.yml | 50 ++++++++++++--------- plugins/modules/swim_workflow_manager.py | 55 +++++++++++------------- 2 files changed, 54 insertions(+), 51 deletions(-) diff --git a/playbooks/swim_workflow_manager.yml b/playbooks/swim_workflow_manager.yml index 2f2073f60..644151add 100644 --- a/playbooks/swim_workflow_manager.yml +++ b/playbooks/swim_workflow_manager.yml @@ -22,24 +22,34 @@ dnac_api_task_timeout: 1000 dnac_task_poll_interval: 1 config: - - tagging_details: - image_name: cat9k_iosxe.17.15.01.SPA.bin - device_role: core, distribution - device_image_family_name: Cisco Catalyst 9300 Switch - # site_name: Global/Chennai/LTTS/FLOOR1 - tagging: False + - import_image_details: + type: remote + url_details: + payload: + - source_url: + - "http://172.21.236.183/swim/V1712_2_CCO/cat9k_iosxe.17.12.02.SPA.bin" + third_party: False + tagging_details: + image_name: cat9k_iosxe.17.12.02.SPA.bin + device_role: ALL + device_image_family_name: Cisco Catalyst 9300 Switch + site_name: "{{item.site_name}}" + tagging: True + # image_distribution_details: + # image_name: cat9k_iosxe.17.12.02.SPA.bin + # site_name: "{{item.site_name}}" + # device_role: "{{ item.device_role }}" + # device_family_name: "{{ item.device_family_name }}" + # device_series_name: "Catalyst 9300 Series" + image_activation_details: + image_name: cat9k_iosxe.17.12.02.SPA.bin + site_name: "{{item.site_name}}" + device_role: "{{ item.device_role }}" + device_family_name: "{{ item.device_family_name }}" + device_series_name: "Catalyst 9300 Series" + scehdule_validate: False + distribute_if_needed: True - - # - image_distribution_details: - # site_name: Global/Chennai/LTTS/FLOOR1 - # device_image_family_name: Cisco Catalyst 9300 Switch - # device_role: CORE - # device_family_name: Switches and Hubs - # device_series_name: Cisco Catalyst 9300 Series Switches - - # - image_distribution_details: - # # image_name: cat9k_iosxe.17.12.01.SPA.bin - # site_name: Global/USA/San Francisco/BGL_18 - # device_role: ALL - # device_family_name: Switches and Hubs - # device_series_name: Cisco Catalyst 9300 Series Switches \ No newline at end of file + with_items: "{{ image_details }}" + tags: + - swim diff --git a/plugins/modules/swim_workflow_manager.py b/plugins/modules/swim_workflow_manager.py index fa997883e..0aa7020ee 100644 --- a/plugins/modules/swim_workflow_manager.py +++ b/plugins/modules/swim_workflow_manager.py @@ -177,23 +177,21 @@ description: SWIM image name which will be tagged or untagged as golden. type: str device_role: - description: Defines the device role, with permissible values including ALL, UNKNOWN, ACCESS, BORDER ROUTER, - DISTRIBUTION, and CORE. - ALL - This role typically represents all devices within the network, regardless of their specific roles or functions. 
- UNKNOWN - This role is assigned to devices whose roles or functions have not been identified or classified within Cisco Catalsyt Center. - This could happen if the platform is unable to determine the device's role based on available information. - ACCESS - This role typically represents switches or access points that serve as access points for end-user devices to connect to the network. - These devices are often located at the edge of the network and provide connectivity to end-user devices. - BORDER ROUTER - These are devices that connect different network domains or segments together. They often serve as - gateways between different networks, such as connecting an enterprise network to the internet or connecting - multiple branch offices. - DISTRIBUTION - This role represents function as distribution switches or routers in hierarchical network designs. They aggregate traffic - from access switches and route it toward the core of the network or toward other distribution switches. - CORE - This role typically represents high-capacity switches or routers that form the backbone of the network. They handle large volumes - of traffic and provide connectivity between different parts of network, such as connecting distribution switches or - providing interconnection between different network segments. - Only the end-state device role will be marked as golden. For example, if a device is already marked as golden for ACCESS, and it needs to be marked - as golden for DISTRIBUTION and CORE, the system will first un-tag the golden status for ACCESS before marking DISTRIBUTION and CORE as golden. + description: | + Specifies the device role(s) for tagging or untagging the image as golden. + Permissible values: + - `ALL`: Applies the golden tag to all devices, regardless of role. + - `UNKNOWN`: For devices without a specific classification. + - `ACCESS`: Devices connecting end-user devices (e.g., access switches). + - `BORDER ROUTER`: Devices linking different network segments or domains. + - `DISTRIBUTION`: Devices aggregating traffic towards the core. + - `CORE`: Backbone devices handling high-volume network traffic. + Behavior: + - If `device_role` is provided as a single string (e.g., `"ACCESS"`), that role alone will be tagged as golden. + - If `device_role` is provided as a single string with multiple roles (e.g., `"ACCESS,CORE"`), both roles will be tagged as golden. + Examples: + - Given `device_role: "ACCESS"`, only the `ACCESS` role will be tagged as golden. + - If `device_role: "ACCESS,CORE"` is used, both `ACCESS` and `CORE` roles will be tagged as golden. 
type: str device_image_family_name: description: Device Image family name(Eg Cisco Catalyst 9300 Switch) @@ -464,7 +462,7 @@ site_name: Global/USA/San Francisco/BGL_18 tagging: True -- name: Tag the specified image as golden for multiple device roles and load it onto the device +- name: Tag the specified image as golden for multiple device roles and load it into the device cisco.dnac.swim_workflow_manager: dnac_host: "{{dnac_host}}" dnac_username: "{{dnac_username}}" @@ -478,7 +476,7 @@ config: - tagging_details: image_name: cat9k_iosxe.17.12.01.SPA.bin - device_role: ("ACCESS","DISTRIBUTION") + device_role: ACCESS,CORE device_image_family_name: Cisco Catalyst 9300 Switch site_name: Global/USA/San Francisco/BGL_18 tagging: True @@ -1543,9 +1541,7 @@ def get_diff_tagging(self): tag_image_golden = tagging_details.get("tagging") image_name = self.get_image_name_from_id(self.have.get("tagging_image_id")) device_role = tagging_details.get("device_role", "ALL") - device_role_no = [] - already_un_tagged_device_role = [] - already_tagged_device_role = [] + device_role_no, already_un_tagged_device_role, already_tagged_device_role = [], [], [] device_roles = ["core", "distribution", "access", "border router", "unknown", "all"] @@ -1590,13 +1586,11 @@ def get_diff_tagging(self): msg = "SWIM Image '{0}' already tagged as Golden image in Cisco Catalyst Center".format(image_name) self.log(msg, "INFO") already_tagged_device_role.append(role) - if not image_status and image_status == tag_image_golden: msg = "SWIM Image '{0}' already un-tagged from Golden image in Cisco Catalyst Center".format(image_name) self.log(msg, "INFO") already_un_tagged_device_role.append(role) - # Check if all roles are tagged as Golden if tag_image_golden: @@ -1637,7 +1631,7 @@ def get_diff_tagging(self): ) self.log("Received API response from 'tag_as_golden_image': {0}".format(str(response)), "DEBUG") - else: + else: for role in device_role.split(','): image_params = { "image_id": self.have.get("tagging_image_id"), @@ -1681,7 +1675,8 @@ def get_diff_tagging(self): if not task_details.get("isError") and 'successful' in task_details.get("progress"): self.status = "success" self.result['changed'] = True - self.msg = "Tagging image {0} golden for site {1} for family {2} for device deviceTag - {3} successful".format(image_name, site_name, device_family, device_role) + self.msg = ("Tagging image {0} golden for site {1} for family {2} for device deviceTag" + " - {3} successful".format(image_name, site_name, device_family, device_role)) self.result['msg'] = self.msg self.result['response'] = self.msg self.log(self.msg, "INFO") @@ -1690,7 +1685,8 @@ def get_diff_tagging(self): if not task_details.get("isError") and 'successful' in task_details.get("progress"): self.status = "success" self.result['changed'] = True - self.msg = "Un-Tagging image {0} golden for site {1} for family {2} for device deviceTag - {3} successful".format(image_name, site_name, device_family, device_role) + self.msg = ("Un-Tagging image {0} golden for site {1} for family {2} for device deviceTag" + " - {3} successful".format(image_name, site_name, device_family, device_role)) self.result['msg'] = self.msg self.result['response'] = self.msg self.log(self.msg, "INFO") @@ -1704,10 +1700,8 @@ def get_diff_tagging(self): self.result['msg'] = failure_reason self.log(self.msg, "ERROR") break - return self - def get_device_ip_from_id(self, device_id): """ Retrieve the management IP address of a device from Cisco Catalyst Center using its ID. 
@@ -2206,7 +2200,6 @@ def verify_diff_tagged(self): return self - def verify_diff_distributed(self): """ Verify the distribution status of a software image in Cisco Catalyst Center. @@ -2374,4 +2367,4 @@ def main(): if __name__ == '__main__': - main() \ No newline at end of file + main() From 4cb4115d926691efff3bc252383bfc2b24cbf739 Mon Sep 17 00:00:00 2001 From: JosePabloOcampo1212 Date: Tue, 12 Nov 2024 16:34:06 -0600 Subject: [PATCH 73/83] remove duplicates snmpRwCommunity --- changelogs/changelog.yaml | 9 ++++++++- plugins/modules/discovery.py | 10 ++-------- 2 files changed, 10 insertions(+), 9 deletions(-) diff --git a/changelogs/changelog.yaml b/changelogs/changelog.yaml index 41a12800e..ea27d83ee 100644 --- a/changelogs/changelog.yaml +++ b/changelogs/changelog.yaml @@ -1070,4 +1070,11 @@ releases: release_summary: modifications to fix issues '#200'. minor_changes: - Removing duplicates in the discovery.py module - - Variable initialization in the event_subscription.py action \ No newline at end of file + - Variable initialization in the event_subscription.py action + + 6.24.0: + release_date: "2024-11-12" + changes: + release_summary: modifications to fix issues '#200'. + minor_changes: + - Removing duplicates in the discovery.py module. snmpRwCommunity property. \ No newline at end of file diff --git a/plugins/modules/discovery.py b/plugins/modules/discovery.py index 579b4cf1e..ca05a5bd7 100644 --- a/plugins/modules/discovery.py +++ b/plugins/modules/discovery.py @@ -175,12 +175,6 @@ snmpPrivProtocol: description: SNMP privacy protocol. 'AES128'. type: str - snmpRWCommunity: - description: SNMP RW community of the devices to be discovered. - type: str - snmpRWCommunityDesc: - description: Description for SNMP RW community. - type: str snmpRoCommunity: description: SNMP RO community of the devices to be discovered. 
type: str @@ -364,8 +358,8 @@ snmpPrivProtocol: string snmpRoCommunity: string snmpRoCommunityDesc: string - snmpRWCommunity: string - snmpRWCommunityDesc: string + snmpRwCommunity: string + snmpRwCommunityDesc: string snmpUserName: string snmpVersion: string timeOut: 0 From 015916755fbbf243d7013a2ae5336dbd3d7c70c7 Mon Sep 17 00:00:00 2001 From: Syed-khadeerahmed Date: Wed, 13 Nov 2024 10:08:08 +0530 Subject: [PATCH 74/83] minor fix in the code --- plugins/modules/swim_workflow_manager.py | 1 - 1 file changed, 1 deletion(-) diff --git a/plugins/modules/swim_workflow_manager.py b/plugins/modules/swim_workflow_manager.py index 0aa7020ee..cd57ca8f3 100644 --- a/plugins/modules/swim_workflow_manager.py +++ b/plugins/modules/swim_workflow_manager.py @@ -1604,7 +1604,6 @@ def get_diff_tagging(self): return self else: if len(already_un_tagged_device_role) == len(device_role_no): - self.log("inside logic") self.status = "success" self.result['changed'] = False self.msg = "SWIM Image '{0}' already un-tagged as Golden image in Cisco Catalyst Center for the roles - {1}.".format(image_name, device_role) From d62436745819725b78737d7d96438baba216f15c Mon Sep 17 00:00:00 2001 From: Abhishek-121 Date: Wed, 13 Nov 2024 10:21:20 +0530 Subject: [PATCH 75/83] update the log message --- .../sda_fabric_sites_zones_workflow_manager.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/plugins/modules/sda_fabric_sites_zones_workflow_manager.py b/plugins/modules/sda_fabric_sites_zones_workflow_manager.py index 6a85a82bc..d3dfa637e 100644 --- a/plugins/modules/sda_fabric_sites_zones_workflow_manager.py +++ b/plugins/modules/sda_fabric_sites_zones_workflow_manager.py @@ -1187,7 +1187,7 @@ def is_wired_data_collection_enable(self, site_name, site_id): function logs relevant messages and returns False. If wired data collection is enabled, it returns True. """ - self.log("Checking whether wired data collection is enabled or not for the site: {0}".format(site_name), "INFO") + self.log("Checking whether wired data collection is enabled for the site: {0}".format(site_name), "INFO") try: telemetry_response = self.dnac._exec( @@ -1212,7 +1212,7 @@ def is_wired_data_collection_enable(self, site_name, site_id): if not is_enabled: self.log("Wired Data Collection is not enabled at this site '{0}'.".format(site_name), "DEBUG") return False - + self.log("Wired Data Collection is enabled at this site '{0}'.".format(site_name), "DEBUG") except Exception as e: self.msg = ( "Exception occurred while getting telemetry settings for site '{0}' (ID: {1}): {2}".format(site_name, site_id, str(e)) @@ -1239,8 +1239,7 @@ def get_telemetry_details(self, site_name, site_id): to "failed." 
""" - self.log("Checking whether wired data collection is enabled or not for the site: {0}".format(site_name), "INFO") - + self.log("Fetching telemetry settings for site: {0}".format(site_name), "INFO") try: telemetry_response = self.dnac._exec( family="network_settings", @@ -1302,7 +1301,8 @@ def enable_wired_data_collection(self, site_name, site_id): self.msg = "Unable to retrieve the task_id for the task '{0}'.".format(task_name) self.set_operation_result("failed", False, self.msg, "ERROR") return self - success_msg = "Wired Data Collection gets enabled for the site '{0}'".format(site_name) + + success_msg = "Successfully enabled wired data collection for site '{0}'.".format(site_name) self.log(success_msg, "INFO") self.get_task_status_from_tasks_by_id(task_id, task_name, success_msg) except Exception as e: @@ -1368,7 +1368,7 @@ def get_diff_merged(self, config): if not is_wired_data_enable: self.log("Wired Data Collection is not enabled at this site '{0}'.".format(site_name), "INFO") self.enable_wired_data_collection(site_name, site_id).check_return_status() - self.log("Wired Data Collection is enabled at this site '{0}' successfully.".format(site_name), "INFO") + self.log("Wired Data Collection has been successfully enabled for site '{0}'.".format(site_name), "INFO") else: self.log("Wired Data Collection is already enabled at this site '{0}'.".format(site_name), "INFO") @@ -1399,8 +1399,8 @@ def get_diff_merged(self, config): if auth_profile and auth_profile != zone_in_ccc.get("authenticationProfileName"): self.log( - "Authentication profile '{0}' not matched with the profile in the Cisco Catalyst Center " - "for the fabric zone {1}".format(auth_profile, site_name), "INFO" + "Authentication profile '{0}' does not match the profile '{1}' in Cisco Catalyst Center " + "for the fabric zone '{2}'.".format(auth_profile, zone_in_ccc.get("authenticationProfileName"), site_name), "INFO" ) self.update_fabric_zone(site, zone_in_ccc).check_return_status() else: From d587688e3275cd12cb070ad581dbd11fc6880efa Mon Sep 17 00:00:00 2001 From: md-rafeek Date: Wed, 13 Nov 2024 13:07:18 +0530 Subject: [PATCH 76/83] Bug fixed for the AP - PNP devices --- plugins/modules/pnp_workflow_manager.py | 37 ++++++++++--------------- 1 file changed, 15 insertions(+), 22 deletions(-) diff --git a/plugins/modules/pnp_workflow_manager.py b/plugins/modules/pnp_workflow_manager.py index dfa0ef6a2..4360d26dc 100644 --- a/plugins/modules/pnp_workflow_manager.py +++ b/plugins/modules/pnp_workflow_manager.py @@ -393,8 +393,7 @@ def validate_input(self): ) if invalid_params: - self.msg = "Invalid parameters in playbook: {0}".format( - "\n".join(invalid_params)) + self.msg = "Invalid parameters in playbook: {0}".format("\n".join(invalid_params)) self.log(str(self.msg), "ERROR") self.set_operation_result("failed", False, self.msg, "ERROR").check_return_status() @@ -464,31 +463,26 @@ def get_site_type(self): self.log("Received site details for '{0}': {1}".format(self.want.get("site_name"), str(response)), "DEBUG") site = response.get("response") - site_additional_info = site[0].get("additionalInfo") site_type = None - for item in site_additional_info: - if item["nameSpace"] == "Location": - site_type = item.get("attributes").get("type") - self.log("Site type for site name '{1}' : {0}". 
- format(site_type, self.want.get("site_name")), "INFO") + + if self.compare_dnac_versions(self.get_ccc_version(), "2.3.5.3") <= 0: + site_additional_info = site[0].get("additionalInfo") + for item in site_additional_info: + if item["nameSpace"] == "Location": + site_type = item.get("attributes").get("type") + self.log("Site type for site name '{1}' : {0}". + format(site_type, self.want.get("site_name")), "INFO") + else: + site_type = site[0].get("type") + self.log("Site type for site name '{1}' : {0}". + format(site_type, self.want.get("site_name")), "INFO") + return site_type except Exception: self.msg = "Exception occurred as site '{0}' was not found".format(self.want.get("site_name")) self.log(self.msg, "CRITICAL") self.set_operation_result("failed", False, self.msg, "ERROR").check_return_status() - if response: - self.log("Received site details for '{0}': {1}".format(self.want.get("site_name"), - str(response)), "DEBUG") - site = response.get("response") - site_additional_info = site[0].get("additionalInfo") - for item in site_additional_info: - if item["nameSpace"] == "Location": - site_type = item.get("attributes").get("type") - self.log("Site type for site name '{1}' : {0}".format(site_type, self.want.get("site_name")), "INFO") - - return site_type - def get_pnp_params(self, params): """ Store pnp parameters from the playbook for pnp processing in Cisco Catalyst Center. @@ -785,8 +779,7 @@ def get_have(self): template_name = self.want.get("template_name") if template_name: if not (template_list and isinstance(template_list, list)): - self.msg = "Either project not found"\ - " or it is Empty." + self.msg = "Either project not found or it is Empty." self.log(self.msg, "CRITICAL") self.set_operation_result("failed", False, self.msg, "ERROR").check_return_status() From 1ba8d4a1f2f0ab4b3479c8a9f770a0d687817e27 Mon Sep 17 00:00:00 2001 From: Syed-khadeerahmed Date: Wed, 13 Nov 2024 14:21:07 +0530 Subject: [PATCH 77/83] review comments completed --- plugins/modules/swim_workflow_manager.py | 38 ++++++++++++------------ 1 file changed, 19 insertions(+), 19 deletions(-) diff --git a/plugins/modules/swim_workflow_manager.py b/plugins/modules/swim_workflow_manager.py index cd57ca8f3..94d831a60 100644 --- a/plugins/modules/swim_workflow_manager.py +++ b/plugins/modules/swim_workflow_manager.py @@ -180,18 +180,18 @@ description: | Specifies the device role(s) for tagging or untagging the image as golden. Permissible values: - - `ALL`: Applies the golden tag to all devices, regardless of role. - - `UNKNOWN`: For devices without a specific classification. - - `ACCESS`: Devices connecting end-user devices (e.g., access switches). - - `BORDER ROUTER`: Devices linking different network segments or domains. - - `DISTRIBUTION`: Devices aggregating traffic towards the core. - - `CORE`: Backbone devices handling high-volume network traffic. + - 'ALL': Applies the golden tag to all devices, regardless of role. + - 'UNKNOWN': Tags devices without a specified classification. + - 'ACCESS': Tags devices that connect end-user devices (e.g., access switches). + - 'BORDER ROUTER': Tags devices linking different network segments or domains. + - 'DISTRIBUTION': Tags devices aggregating traffic toward the core. + - 'CORE': Tags backbone devices handling high-volume network traffic. Behavior: - - If `device_role` is provided as a single string (e.g., `"ACCESS"`), that role alone will be tagged as golden. 
- - If `device_role` is provided as a single string with multiple roles (e.g., `"ACCESS,CORE"`), both roles will be tagged as golden. + - If 'device_role' is a single string (e.g., `"ACCESS"`), only that role is tagged as golden. + - If 'device_role' contains multiple roles (e.g., `"ACCESS,CORE"`), all specified roles are tagged as golden. Examples: - - Given `device_role: "ACCESS"`, only the `ACCESS` role will be tagged as golden. - - If `device_role: "ACCESS,CORE"` is used, both `ACCESS` and `CORE` roles will be tagged as golden. + - device_role: "ACCESS" tags only the `ACCESS` role as golden. + - device_role: "ACCESS,CORE" tags both `ACCESS` and `CORE` roles as golden. type: str device_image_family_name: description: Device Image family name(Eg Cisco Catalyst 9300 Switch) @@ -725,8 +725,8 @@ def get_cco_image_id(self, cco_image_name): Description: This function sends a request to Cisco Catalsyt Center to retrieve a list of software images - using the `returns_list_of_software_images` API. It then iterates through the response - to find a match for the provided `cco_image_name`. If a match is found, the corresponding + using the 'returns_list_of_software_images' API. It then iterates through the response + to find a match for the provided 'cco_image_name'. If a match is found, the corresponding image ID is returned. If no matching image is found, or if the image ID is not present in the response, the function logs an error message and raises an exception. """ @@ -1541,6 +1541,7 @@ def get_diff_tagging(self): tag_image_golden = tagging_details.get("tagging") image_name = self.get_image_name_from_id(self.have.get("tagging_image_id")) device_role = tagging_details.get("device_role", "ALL") + self.log("Parsed device roles: {0}".format(device_role), "DEBUG") device_role_no, already_un_tagged_device_role, already_tagged_device_role = [], [], [] device_roles = ["core", "distribution", "access", "border router", "unknown", "all"] @@ -1559,6 +1560,7 @@ def get_diff_tagging(self): self.result['response'] = self.msg self.check_return_status() + self.log("Checking golden tag status for each role...", "DEBUG") for role in device_role.split(','): image_params = { "image_id": self.have.get("tagging_image_id"), @@ -1567,6 +1569,7 @@ def get_diff_tagging(self): "device_role": role.upper() } + self.log("Parameters for checking tag status for role '{0}': {1}".format(role, image_params), "DEBUG") response = self.dnac._exec( family="software_image_management_swim", function="get_golden_tag_status_of_an_image", @@ -1578,19 +1581,15 @@ def get_diff_tagging(self): api_response = response.get('response') if api_response: image_status = api_response.get('taggedGolden') - self.log(image_status) - self.log(tag_image_golden) - - if image_status == tag_image_golden: - self.log("inside if") + if image_status and tag_image_golden is True: msg = "SWIM Image '{0}' already tagged as Golden image in Cisco Catalyst Center".format(image_name) self.log(msg, "INFO") already_tagged_device_role.append(role) - - if not image_status and image_status == tag_image_golden: + elif not image_status and not tag_image_golden: msg = "SWIM Image '{0}' already un-tagged from Golden image in Cisco Catalyst Center".format(image_name) self.log(msg, "INFO") already_un_tagged_device_role.append(role) + self.log("Verifying if all roles are in the desired tag status...", "DEBUG") # Check if all roles are tagged as Golden if tag_image_golden: @@ -2184,6 +2183,7 @@ def verify_diff_tagged(self): response = response.get('response') if response: 
image_status = response['taggedGolden'] + self.log("Current golden tag status for image '{0}': {1}".format(image_name, image_status), "DEBUG") if image_status == tag_image_golden: if tag_image_golden: self.msg = """The requested image '{0}' has been tagged as golden in the Cisco Catalyst Center and From 30188e2a6877aa0c7113ebb68556f9190dc2b714 Mon Sep 17 00:00:00 2001 From: JosePabloOcampo1212 Date: Wed, 13 Nov 2024 15:50:00 -0600 Subject: [PATCH 78/83] update changelog --- changelogs/changelog.yaml | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/changelogs/changelog.yaml b/changelogs/changelog.yaml index 68166ac2e..05d7c9ae4 100644 --- a/changelogs/changelog.yaml +++ b/changelogs/changelog.yaml @@ -1084,4 +1084,14 @@ releases: changes: release_summary: modifications to fix issues '#200'. minor_changes: - - Removing duplicates in the discovery.py module. snmpRwCommunity property. \ No newline at end of file + - Removing duplicates in the discovery.py module. snmpRwCommunity property. + - Changes in circleci to run test cases in integration branch + - Added support for bulk operations on multiple access points in accesspoint_workflow_manager + - Bug fixes in inventory_workflow_manager + - Enhancements in sda_fabric_devices_workflow_manager.py to support route distribution protocol + - Enhancements in sda_fabric_sites_zones_workflow_manager.py + - Bug fixes in sda_fabric_virtual_networks_workflow_manager.py + - Changes in site_workflow_manager + - accesspoint_workflow_manager - added attribute bulk_update_aps + - sda_fabric_devices_workflow_manager.py - added attribute route_distribution_protocol + - sda_fabric_sites_zones_workflow_manager.py - added attribute site_name_hierarchy and removed attribute site_name From 18394c76254351083a385c6aa8ddf1546b31cc42 Mon Sep 17 00:00:00 2001 From: JosePabloOcampo1212 <79291842+JosePabloOcampo1212@users.noreply.github.com> Date: Wed, 13 Nov 2024 16:16:10 -0600 Subject: [PATCH 79/83] Update sanity_tests_devel.yml --- .github/workflows/sanity_tests_devel.yml | 36 ++++++++++++------------ 1 file changed, 18 insertions(+), 18 deletions(-) diff --git a/.github/workflows/sanity_tests_devel.yml b/.github/workflows/sanity_tests_devel.yml index 3c80be105..10d1a265f 100644 --- a/.github/workflows/sanity_tests_devel.yml +++ b/.github/workflows/sanity_tests_devel.yml @@ -62,24 +62,24 @@ jobs: - name: Install yamllint run: pip install --user yamllint - - name: Run yamllint - run: | - if [ -s changed_files.txt ]; then - changed_files=$(cat changed_files.txt) - lintable_files="" - for file in $changed_files; do - # Check if the file belongs to the plugins/modules or playbooks directory - if [[ $file == plugins/modules/* || $file == playbooks/* ]]; then - lintable_files="$lintable_files $file" + - name: Run yamllint + run: | + if [ -s changed_files.txt ]; then + changed_files=$(cat changed_files.txt) + lintable_files="" + for file in $changed_files; do + # Check if the file belongs to the plugins/modules or playbooks directory + if [[ $file == plugins/modules/* || $file == playbooks/* ]]; then + lintable_files="$lintable_files $file" + fi + done + + if [ -n "$lintable_files" ]; then + yamllint -c .yamllint.yml $lintable_files + else + echo "No relevant files to lint." fi - done - - if [ -n "$lintable_files" ]; then - yamllint -c .yamllint.yml $lintable_files else - echo "No relevant files to lint." + echo "No changed files to lint." fi - else - echo "No changed files to lint." 
- fi - working-directory: ./ansible_collections/${{env.NAMESPACE}}/${{env.COLLECTION_NAME}} \ No newline at end of file + working-directory: ./ansible_collections/${{env.NAMESPACE}}/${{env.COLLECTION_NAME}} From 56171280ef72e1bdb3a5c20fabc7cb77b62ce930 Mon Sep 17 00:00:00 2001 From: JosePabloOcampo1212 Date: Wed, 13 Nov 2024 16:24:05 -0600 Subject: [PATCH 80/83] update galaxy.yml version --- galaxy.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/galaxy.yml b/galaxy.yml index f719fc0d8..935f754c0 100644 --- a/galaxy.yml +++ b/galaxy.yml @@ -1,7 +1,7 @@ --- namespace: cisco name: dnac -version: 6.23.0 +version: 6.24.0 readme: README.md authors: - Rafael Campos From 59f99b280e2467b3fe6d3e80fe2ca41f4154809c Mon Sep 17 00:00:00 2001 From: Rugvedi Kapse Date: Mon, 18 Nov 2024 14:04:57 -0500 Subject: [PATCH 81/83] converted yaml syntax errors to warning --- .yamllint.yml | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/.yamllint.yml b/.yamllint.yml index e31652694..51f48f766 100644 --- a/.yamllint.yml +++ b/.yamllint.yml @@ -3,18 +3,27 @@ extends: default ignore: - tests - changelogs/changelog.yaml -# - playbooks + # - playbooks + # - plugins + level: warning rules: + syntax-error: + level: warning + line-length: max: 160 level: warning + indentation: level: warning + trailing-spaces: level: warning + new-line-at-end-of-file: level: warning + key-duplicates: level: warning From 800b882b34fbcd13f9382f364ab80fd4de49b53e Mon Sep 17 00:00:00 2001 From: Rugvedi Kapse Date: Mon, 18 Nov 2024 14:16:30 -0500 Subject: [PATCH 82/83] removed commented code --- .circleci/config.yml | 104 ------------------------------------------- 1 file changed, 104 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 3e0d25e64..9f53c85ed 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -340,25 +340,6 @@ jobs: find / -name run_tests.sh cat ccc_roles.yml | circleci tests run --command "xargs ./run_tests.sh" --split-by=timings --timings-type=name - # echo -n 0 > rc - # cat ccc_roles.yml | circleci tests run --command "xargs ./run_tests.sh" --split-by=timings --timings-type=name || echo -n $? > rc - # echo ${LOGS_SSH_KEY} | base64 -d > ~/id_ssh - # chmod 600 ~/id_ssh - # export NO_PROXY="" - # export no_proxy="" - # ssh $LOGS_MACHINE \ - # -o StrictHostKeyChecking=no \ - # -o UserKnownHostsFile=/dev/null \ - # -i ~/id_ssh \ - # mkdir -p /var/www/html/$CIRCLE_PROJECT_USERNAME/$CIRCLE_PROJECT_REPONAME/$CIRCLE_BUILD_NUM - # scp \ - # -i ~/id_ssh \ - # -o StrictHostKeyChecking=no \ - # -o UserKnownHostsFile=/dev/null \ - # sanity_tests_logs_$CIRCLE_NODE_INDEX.log $LOGS_MACHINE:/var/www/html/$CIRCLE_PROJECT_USERNAME/$CIRCLE_PROJECT_REPONAME/$CIRCLE_BUILD_NUM - # echo "LOGS URL: http://10.195.243.37/$CIRCLE_PROJECT_USERNAME/$CIRCLE_PROJECT_REPONAME/$CIRCLE_BUILD_NUM/sanity_tests_logs_$CIRCLE_NODE_INDEX.log" - # if [[ $(cat rc) != "0" ]]; then exit 1; fi - no_output_timeout: 120m post_pnp_testing: @@ -373,51 +354,6 @@ jobs: command: | python ${HOME}/static/pnp_script.py #TODO - # main-pr: - # docker: - # - image: maniator/gh:v2.49.2 - # resource_class: cisco-en-programmability/catalyst-center-ansible-runner-main - # steps: - # - run: - # name: Clone repo to workspace - # command: git clone --depth=1 -b $CIRCLE_BRANCH https://github.com/$CIRCLE_PROJECT_USERNAME/$CIRCLE_PROJECT_REPONAME.git . 
- # - run: - # name: Create release pr to main - # command: | - # gh repo set-default $CIRCLE_PROJECT_USERNAME/$CIRCLE_PROJECT_REPONAME - # gh pr create \ - # --base main \ - # --head $CIRCLE_BRANCH \ - # --title "Release v<< pipeline.parameters.ansible_cisco_dnac_version >>" \ - # --body "Release v<< pipeline.parameters.ansible_cisco_dnac_version >>" - - # release-job: - # docker: - # - image: python:3.8.10 - # resource_class: cisco-en-programmability/catalyst-center-ansible-runner-main - # steps: - # - run: - # name: Clone repo to workspace - # command: | - # git clone --depth=1 https://github.com/$CIRCLE_PROJECT_USERNAME/$CIRCLE_PROJECT_REPONAME.git . - # git fetch origin $CIRCLE_BRANCH:work - # git checkout work - # - restore_cache: - # keys: - # - collection-<< pipeline.git.revision >> - # # - add_ssh_keys: - # # fingerprints: - # # - "KEY_FINGERPRINT" - # - run: - # echo create release - # git tag \ - # -a v<< pipeline.parameters.ansible_cisco_dnac_version >> \ - # -m "Ansible DNACCollection Version v<< pipeline.parameters.ansible_cisco_dnac_version >>" - # gh release create "v<< pipeline.parameters.ansible_cisco_dnac_version >>" \ - # --title "DNAC Collection Version v<< pipeline.parameters.ansible_cisco_dnac_version >>" \ - # --latest - # ansible-galaxy collection publish workspace/*.tar.gz --api-key=$GALAXYKEY - workflows: building: @@ -494,43 +430,3 @@ workflows: # requires: # - sanity-tests - # release-candidate: - # when: - # or: - # - equal: [run-release-prep, << pipeline.parameters.GHA_Meta >>] - # jobs: - # - build - # - addrole: - # matrix: - # parameters: - # run-all: - # - true - # - sanity-tests: - # requires: - # - addrole - # - build - # context: - # - dnac-servers - # - hold: - # type: approval - # requires: - # - sanity-tests - - # - main-pr: - # context: - # - gh-token - # requires: - # - hold - - # release: - # when: - # or: - # - equal: [run-release, << pipeline.parameters.GHA_Meta >>] - # jobs: - # - build - # - release-job: - # requires: - # - build - # context: - # - gh-token - # - galaxy-token From d53af961c1a973d0973453515953b08afb07d687 Mon Sep 17 00:00:00 2001 From: Rugvedi Kapse Date: Mon, 18 Nov 2024 17:16:13 -0500 Subject: [PATCH 83/83] fixed sanity issues --- .circleci/config.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 9f53c85ed..ca906f6d0 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -429,4 +429,3 @@ workflows: # - post_pnp_testing: # requires: # - sanity-tests -