From ae4d260b670511c9016b6958a6db810934889841 Mon Sep 17 00:00:00 2001 From: MUTHU-RAKESH-27 <19cs127@psgitech.ac.in> Date: Tue, 26 Mar 2024 15:20:55 +0530 Subject: [PATCH 01/18] Added the op_modifies=True for turning off the active validation and changed the result structure --- playbooks/template_workflow_manager.yml | 14 ++++ plugins/modules/template_workflow_manager.py | 87 +++++++++++--------- 2 files changed, 61 insertions(+), 40 deletions(-) diff --git a/playbooks/template_workflow_manager.yml b/playbooks/template_workflow_manager.yml index 25b0ec7975..3727ab4645 100644 --- a/playbooks/template_workflow_manager.yml +++ b/playbooks/template_workflow_manager.yml @@ -14,6 +14,10 @@ dnac_verify: "{{ dnac_verify }}" dnac_debug: "{{ dnac_debug }}" dnac_log: true + dnac_log_level: "{{ dnac_debug }}" + dnac_log_append: true + dnac_log_file_path: "{{ dnac_log_file_path }}" + validate_response_schema: false state: "merged" config_verify: true #ignore_errors: true #Enable this to continue execution even the task fails @@ -28,6 +32,16 @@ software_variant: "{{ item.variant }}" device_types: - product_family: "{{ item.family }}" + export: + project: + - Ansible_project + - Sample Velocity Templates + template: + - project_name : Onboarding Configuration + template_name: AP_Onboarding + import: + project: "{{ item.import_project }}" + # template: "{{ item.import_template }}" register: template_result with_items: '{{ template_details }}' tags: diff --git a/plugins/modules/template_workflow_manager.py b/plugins/modules/template_workflow_manager.py index 13e81da9a4..b3d9cc1cde 100644 --- a/plugins/modules/template_workflow_manager.py +++ b/plugins/modules/template_workflow_manager.py @@ -1331,7 +1331,11 @@ def __init__(self, module): self.supported_states = ["merged", "deleted"] self.accepted_languages = ["JINJA", "VELOCITY"] self.export_template = [] - self.result['response'].append({}) + self.result['response'] = [ + {"configurationTemplate": {"response": {}, "msg": {}}}, + {"export": {"response": {}}}, + {"import": {"response": {}}} + ] def validate_input(self): """ @@ -1917,7 +1921,7 @@ def get_template_params(self, params): self.status = "failed" return self.check_return_status() - temp_params.update({"project_name": projectName}) + temp_params.update({"projectName": projectName}) softwareType = params.get("software_type") if not softwareType: @@ -1956,7 +1960,7 @@ def get_template(self, config): result = items self.log("Received API response from 'get_template_details': {0}".format(items), "DEBUG") - self.result['response'] = items + self.result['response'][0].get("configurationTemplate").update({"items": items}) return result def get_have_project(self, config): @@ -2419,7 +2423,7 @@ def update_configuration_templates(self, configuration_templates): if is_template_found: if not self.requires_update(): # Template does not need update - self.result.update({ + self.result['response'][0].get("configurationTemplate").update({ 'response': self.have_template.get("template"), 'msg': "Template does not need update" }) @@ -2466,12 +2470,12 @@ def update_configuration_templates(self, configuration_templates): return self task_details = self.get_task_details(task_id) self.result['changed'] = True - self.result['msg'] = task_details.get('progress') - self.result['diff'] = configuration_templates + self.result['response'][0].get("configurationTemplate")['msg'] = task_details.get('progress') + self.result['response'][0].get("configurationTemplate")['diff'] = configuration_templates self.log("Task details for 
'version_template': {0}".format(task_details), "DEBUG") - self.result['response'] = task_details if task_details else response + self.result['response'][0].get("configurationTemplate")['response'] = task_details if task_details else response - if not self.result.get('msg'): + if not self.result['response'][0].get("configurationTemplate").get('msg'): self.msg = "Error while versioning the template" self.status = "failed" return self @@ -2494,16 +2498,16 @@ def handle_export(self, export): response = self.dnac._exec( family="configuration_templates", function='export_projects', + op_modifies=True, params={ "payload": export_project, - "active_validation": False, }, ) validation_string = "successfully exported project" self.check_task_response_status(response, validation_string, True).check_return_status() - self.result['response'][0].update({"exportProject": self.msg}) + self.result['response'][1].get("export").get("response").update({"exportProject": self.msg}) export_values = export.get("template") if export_values: @@ -2513,16 +2517,16 @@ def handle_export(self, export): response = self.dnac._exec( family="configuration_templates", function='export_templates', + op_modifies=True, params={ "payload": self.export_template, - "active_validation": False, }, ) validation_string = "successfully exported template" self.check_task_response_status(response, validation_string, True).check_return_status() - self.result['response'][0].update({"exportTemplate": self.msg}) + self.result['response'][1].get("export").get("response").update({"exportTemplate": self.msg}) return self @@ -2566,14 +2570,15 @@ def handle_import(self, _import): response = self.dnac._exec( family="configuration_templates", function='imports_the_projects_provided', + op_modifies=True, params=_import_project, ) validation_string = "successfully imported project" self.check_task_response_status(response, validation_string).check_return_status() - self.result['response'][0].update({"importProject": validation_string}) + self.result['response'][2].get("import").get("response").update({"importProject": validation_string}) else: self.msg = "Projects '{0}' already available.".format(payload) - self.result['response'][0].update({ + self.result['response'][2].get("import").get("response").update({ "importProject": "Projects '{0}' already available.".format(payload) }) @@ -2610,11 +2615,12 @@ def handle_import(self, _import): response = self.dnac._exec( family="configuration_templates", function='imports_the_templates_provided', + op_modifies=True, params=import_template ) validation_string = "successfully imported template" self.check_task_response_status(response, validation_string).check_return_status() - self.result['response'][0].update({"importTemplate": validation_string}) + self.result['response'][2].get("import").get("response").update({"importTemplate": validation_string}) return self @@ -2685,13 +2691,13 @@ def delete_project_or_template(self, config, is_delete_project=False): if task_id: task_details = self.get_task_details(task_id) self.result['changed'] = True - self.result['msg'] = task_details.get('progress') - self.result['diff'] = config.get("configuration_templates") + self.result['response'][0].get("configurationTemplate")['msg'] = task_details.get('progress') + self.result['response'][0].get("configurationTemplate")['diff'] = config.get("configuration_templates") self.log("Task details for '{0}': {1}".format(deletion_value, task_details), "DEBUG") - self.result['response'] = task_details if task_details else response - 
if not self.result['msg']: - self.result['msg'] = "Error while deleting {name} : " + self.result['response'][0].get("configurationTemplate")['response'] = task_details if task_details else response + if not self.result['response'][0].get("configurationTemplate")['msg']: + self.result['response'][0].get("configurationTemplate")['msg'] = "Error while deleting {name} : " self.status = "failed" return self @@ -2774,11 +2780,11 @@ def verify_diff_merged(self, config): "softwareVariant", "templateContent"] for item in template_params: if self.have_template.get("template").get(item) != self.want.get("template_params").get(item): - self.msg = " Configuration Template config is not applied to the Cisco Catalyst Center." + self.msg = "Configuration Template config is not applied to the Cisco Catalyst Center." self.status = "failed" return self self.log("Successfully validated the Template in the Catalyst Center.", "INFO") - self.result.get("response").update({"Validation": "Success"}) + self.result['response'][0].get("configurationTemplate").get("response").update({"Validation": "Success"}) self.msg = "Successfully validated the Configuration Templates." self.status = "success" @@ -2816,7 +2822,7 @@ def verify_diff_deleted(self, config): return self self.log("Successfully validated absence of template in the Catalyst Center.", "INFO") - self.result.get("response").update({"Validation": "Success"}) + self.result['response'][0].get("configurationTemplate").get("response").update({"Validation": "Success"}) self.msg = "Successfully validated the absence of Template in the Cisco Catalyst Center." self.status = "success" @@ -2841,23 +2847,24 @@ def reset_values(self): def main(): """ main entry point for module execution""" - element_spec = {'dnac_host': {'required': True, 'type': 'str'}, - 'dnac_port': {'type': 'str', 'default': '443'}, - 'dnac_username': {'type': 'str', 'default': 'admin', 'aliases': ['user']}, - 'dnac_password': {'type': 'str', 'no_log': True}, - 'dnac_verify': {'type': 'bool', 'default': 'True'}, - 'dnac_version': {'type': 'str', 'default': '2.2.3.3'}, - 'dnac_debug': {'type': 'bool', 'default': False}, - 'dnac_log': {'type': 'bool', 'default': False}, - "dnac_log_level": {"type": 'str', "default": 'WARNING'}, - "dnac_log_file_path": {"type": 'str', "default": 'dnac.log'}, - "dnac_log_append": {"type": 'bool', "default": True}, - 'validate_response_schema': {'type': 'bool', 'default': True}, - "config_verify": {"type": 'bool', "default": False}, - 'dnac_api_task_timeout': {'type': 'int', "default": 1200}, - 'dnac_task_poll_interval': {'type': 'int', "default": 2}, - 'config': {'required': True, 'type': 'list', 'elements': 'dict'}, - 'state': {'default': 'merged', 'choices': ['merged', 'deleted']} + element_spec = { + 'dnac_host': {'required': True, 'type': 'str'}, + 'dnac_port': {'type': 'str', 'default': '443'}, + 'dnac_username': {'type': 'str', 'default': 'admin', 'aliases': ['user']}, + 'dnac_password': {'type': 'str', 'no_log': True}, + 'dnac_verify': {'type': 'bool', 'default': 'True'}, + 'dnac_version': {'type': 'str', 'default': '2.2.3.3'}, + 'dnac_debug': {'type': 'bool', 'default': False}, + 'dnac_log': {'type': 'bool', 'default': False}, + "dnac_log_level": {"type": 'str', "default": 'WARNING'}, + "dnac_log_file_path": {"type": 'str', "default": 'dnac.log'}, + "dnac_log_append": {"type": 'bool', "default": True}, + 'validate_response_schema': {'type': 'bool', 'default': True}, + "config_verify": {"type": 'bool', "default": False}, + 'dnac_api_task_timeout': {'type': 'int', 
"default": 1200}, + 'dnac_task_poll_interval': {'type': 'int', "default": 2}, + 'config': {'required': True, 'type': 'list', 'elements': 'dict'}, + 'state': {'default': 'merged', 'choices': ['merged', 'deleted']} } module = AnsibleModule(argument_spec=element_spec, supports_check_mode=False) From 5e9c12fcd937114acf019c1ca40c4ff0aa86b462 Mon Sep 17 00:00:00 2001 From: MUTHU-RAKESH-27 <19cs127@psgitech.ac.in> Date: Tue, 26 Mar 2024 15:36:35 +0530 Subject: [PATCH 02/18] resolved the sanity error --- plugins/modules/template_workflow_manager.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plugins/modules/template_workflow_manager.py b/plugins/modules/template_workflow_manager.py index b3d9cc1cde..ab6aacd889 100644 --- a/plugins/modules/template_workflow_manager.py +++ b/plugins/modules/template_workflow_manager.py @@ -2865,7 +2865,7 @@ def main(): 'dnac_task_poll_interval': {'type': 'int', "default": 2}, 'config': {'required': True, 'type': 'list', 'elements': 'dict'}, 'state': {'default': 'merged', 'choices': ['merged', 'deleted']} - } + } module = AnsibleModule(argument_spec=element_spec, supports_check_mode=False) ccc_template = Template(module) From 35c366a001c14ea6703aa15567eb2768008cc3dc Mon Sep 17 00:00:00 2001 From: MUTHU-RAKESH-27 <19cs127@psgitech.ac.in> Date: Tue, 26 Mar 2024 15:41:00 +0530 Subject: [PATCH 03/18] Resolved the sanity error --- playbooks/template_workflow_manager.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/playbooks/template_workflow_manager.yml b/playbooks/template_workflow_manager.yml index 3727ab4645..f71e29480a 100644 --- a/playbooks/template_workflow_manager.yml +++ b/playbooks/template_workflow_manager.yml @@ -37,7 +37,7 @@ - Ansible_project - Sample Velocity Templates template: - - project_name : Onboarding Configuration + - project_name: Onboarding Configuration template_name: AP_Onboarding import: project: "{{ item.import_project }}" From bc609736c86bc4a4178c707aaa8be9bb2c5369a0 Mon Sep 17 00:00:00 2001 From: Abinash Date: Tue, 26 Mar 2024 11:36:41 +0000 Subject: [PATCH 04/18] Adding method to check valid ip address --- plugins/module_utils/dnac.py | 25 +++++++++++++ plugins/modules/discovery_intent.py | 36 +++++++++++++++++++ plugins/modules/discovery_workflow_manager.py | 36 +++++++++++++++++++ 3 files changed, 97 insertions(+) diff --git a/plugins/module_utils/dnac.py b/plugins/module_utils/dnac.py index a12e7eaf47..ec190d6123 100644 --- a/plugins/module_utils/dnac.py +++ b/plugins/module_utils/dnac.py @@ -27,6 +27,7 @@ # import datetime import inspect import re +import socket class DnacBase(): @@ -485,6 +486,30 @@ def update_site_type_key(self, config): return new_config + def is_valid_ip(self, ip_address): + """ + Validates an IPv4 address. + + Parameters: + ip_address - String denoting the IPv4 address passed. 
+ + Returns: + bool - Returns true if the passed IP address value is correct or it returns + false if it is incorrect + """ + + try: + socket.inet_aton(ip_address) + octets = ip_address.split('.') + if len(octets) != 4: + return False + for octet in octets: + if not 0 <= int(octet) <= 255: + return False + return True + except socket.error: + return False + def is_list_complex(x): return isinstance(x[0], dict) or isinstance(x[0], list) diff --git a/plugins/modules/discovery_intent.py b/plugins/modules/discovery_intent.py index 96759bb9c6..680a31b46a 100644 --- a/plugins/modules/discovery_intent.py +++ b/plugins/modules/discovery_intent.py @@ -622,6 +622,7 @@ ) import time import re +import ipaddress class Discovery(DnacBase): @@ -721,6 +722,40 @@ def validate_input(self, state=None): self.status = "success" return self + def validate_ip_address_list(self): + """ + Validates each ip adress paased in the IP_address_list passed by the user before preprocessing it + """ + + ip_address_list = self.validated_config[0].get('ip_address_list') + for ip in ip_address_list: + if '/' in ip: + ip = ip.split("/")[0] + if '-' in ip: + if len(ip.split('-')) == 2: + ip1, ip2 = ip.split('-') + if self.is_valid_ip(ip1) is False: + msg = "IP address {0} is not valid".format(ip1) + self.log(msg, "CRITICAL") + self.module.fail_json(msg=msg) + if self.is_valid_ip(ip2) is False: + msg = "IP address {0} is not valid".format(ip2) + self.log(msg, "CRITICAL") + self.module.fail_json(msg=msg) + if ipaddress.IPv4Address(ip1) > ipaddress.IPv4Address(ip2): + msg = "Incorrect range passed. Please pass correct IP address range" + self.log(msg, "CRITICAL") + self.module.fail_json(msg=msg) + else: + msg = "IP address range should have only upper and lower limit values" + self.log(msg, "CRITICAL") + self.module.fail_json(msg=msg) + if self.is_valid_ip(ip) is False and '-' not in ip: + msg = "IP address {0} is not valid".format(ip) + self.log(msg, "CRITICAL") + self.module.fail_json(msg=msg) + self.log("All the IP adresses passed are correct", "INFO") + def get_creds_ids_list(self): """ Retrieve the list of credentials IDs associated with class instance. @@ -1513,6 +1548,7 @@ def get_diff_merged(self): - self: The instance of the class with updated attributes. 
""" + self.validate_ip_address_list() devices_list_info = self.get_devices_list_info() ip_address_list = self.preprocess_device_discovery(devices_list_info) exist_discovery = self.get_exist_discovery() diff --git a/plugins/modules/discovery_workflow_manager.py b/plugins/modules/discovery_workflow_manager.py index 88ce124a39..4edc873965 100644 --- a/plugins/modules/discovery_workflow_manager.py +++ b/plugins/modules/discovery_workflow_manager.py @@ -622,6 +622,7 @@ ) import time import re +import ipaddress class Discovery(DnacBase): @@ -721,6 +722,40 @@ def validate_input(self, state=None): self.status = "success" return self + def validate_ip_address_list(self): + """ + Validates each ip adress paased in the IP_address_list passed by the user before preprocessing it + """ + + ip_address_list = self.validated_config[0].get('ip_address_list') + for ip in ip_address_list: + if '/' in ip: + ip = ip.split("/")[0] + if '-' in ip: + if len(ip.split('-')) == 2: + ip1, ip2 = ip.split('-') + if self.is_valid_ip(ip1) is False: + msg = "IP address {0} is not valid".format(ip1) + self.log(msg, "CRITICAL") + self.module.fail_json(msg=msg) + if self.is_valid_ip(ip2) is False: + msg = "IP address {0} is not valid".format(ip2) + self.log(msg, "CRITICAL") + self.module.fail_json(msg=msg) + if ipaddress.IPv4Address(ip1) > ipaddress.IPv4Address(ip2): + msg = "Incorrect range passed. Please pass correct IP address range" + self.log(msg, "CRITICAL") + self.module.fail_json(msg=msg) + else: + msg = "IP address range should have only upper and lower limit values" + self.log(msg, "CRITICAL") + self.module.fail_json(msg=msg) + if self.is_valid_ip(ip) is False and '-' not in ip: + msg = "IP address {0} is not valid".format(ip) + self.log(msg, "CRITICAL") + self.module.fail_json(msg=msg) + self.log("All the IP adresses passed are correct", "INFO") + def get_creds_ids_list(self): """ Retrieve the list of credentials IDs associated with class instance. @@ -1513,6 +1548,7 @@ def get_diff_merged(self): - self: The instance of the class with updated attributes. """ + self.validate_ip_address_list() devices_list_info = self.get_devices_list_info() ip_address_list = self.preprocess_device_discovery(devices_list_info) exist_discovery = self.get_exist_discovery() From 1d05c276a34357c82b7e8ad1ee82a7e48e8e8948 Mon Sep 17 00:00:00 2001 From: Abinash Date: Tue, 26 Mar 2024 11:55:57 +0000 Subject: [PATCH 05/18] Adding method to check valid ip address --- plugins/modules/discovery_intent.py | 12 +++++++----- plugins/modules/discovery_workflow_manager.py | 12 +++++++----- 2 files changed, 14 insertions(+), 10 deletions(-) diff --git a/plugins/modules/discovery_intent.py b/plugins/modules/discovery_intent.py index 680a31b46a..41f54ae993 100644 --- a/plugins/modules/discovery_intent.py +++ b/plugins/modules/discovery_intent.py @@ -622,7 +622,6 @@ ) import time import re -import ipaddress class Discovery(DnacBase): @@ -742,10 +741,13 @@ def validate_ip_address_list(self): msg = "IP address {0} is not valid".format(ip2) self.log(msg, "CRITICAL") self.module.fail_json(msg=msg) - if ipaddress.IPv4Address(ip1) > ipaddress.IPv4Address(ip2): - msg = "Incorrect range passed. Please pass correct IP address range" - self.log(msg, "CRITICAL") - self.module.fail_json(msg=msg) + ip1_parts = list(map(int, ip1.split('.'))) + ip2_parts = list(map(int, ip2.split('.'))) + for part in range(4): + if ip1_parts[part] > ip2_parts[part]: + msg = "Incorrect range passed. 
Please pass correct IP address range" + self.log(msg, "CRITICAL") + self.module.fail_json(msg=msg) else: msg = "IP address range should have only upper and lower limit values" self.log(msg, "CRITICAL") diff --git a/plugins/modules/discovery_workflow_manager.py b/plugins/modules/discovery_workflow_manager.py index 4edc873965..da895f33c9 100644 --- a/plugins/modules/discovery_workflow_manager.py +++ b/plugins/modules/discovery_workflow_manager.py @@ -622,7 +622,6 @@ ) import time import re -import ipaddress class Discovery(DnacBase): @@ -742,10 +741,13 @@ def validate_ip_address_list(self): msg = "IP address {0} is not valid".format(ip2) self.log(msg, "CRITICAL") self.module.fail_json(msg=msg) - if ipaddress.IPv4Address(ip1) > ipaddress.IPv4Address(ip2): - msg = "Incorrect range passed. Please pass correct IP address range" - self.log(msg, "CRITICAL") - self.module.fail_json(msg=msg) + ip1_parts = list(map(int, ip1.split('.'))) + ip2_parts = list(map(int, ip2.split('.'))) + for part in range(4): + if ip1_parts[part] > ip2_parts[part]: + msg = "Incorrect range passed. Please pass correct IP address range" + self.log(msg, "CRITICAL") + self.module.fail_json(msg=msg) else: msg = "IP address range should have only upper and lower limit values" self.log(msg, "CRITICAL") From 3b45b18d760ae739b6adb46aa41cb293c9248310 Mon Sep 17 00:00:00 2001 From: Abhishek-121 Date: Wed, 27 Mar 2024 09:10:42 +0530 Subject: [PATCH 06/18] fix the issue of key error building while creating other type of site --- plugins/modules/site_intent.py | 15 +++++++++------ plugins/modules/site_workflow_manager.py | 15 +++++++++------ 2 files changed, 18 insertions(+), 12 deletions(-) diff --git a/plugins/modules/site_intent.py b/plugins/modules/site_intent.py index 751d520bea..37a474affc 100644 --- a/plugins/modules/site_intent.py +++ b/plugins/modules/site_intent.py @@ -807,12 +807,15 @@ def get_diff_merged(self, config): else: # Creating New Site site_params = self.want.get("site_params") - if site_params['site']['building']: - building_details = {} - for key, value in site_params['site']['building'].items(): - if value is not None: - building_details[key] = value - site_params['site']['building'] = building_details + try: + if site_params['site']['building']: + building_details = {} + for key, value in site_params['site']['building'].items(): + if value is not None: + building_details[key] = value + site_params['site']['building'] = building_details + except Exception as e: + self.log("Given site is not of type building so no need to remove None keys from the site_params dictionary", "INFO") response = self.dnac._exec( family="sites", diff --git a/plugins/modules/site_workflow_manager.py b/plugins/modules/site_workflow_manager.py index 1ae28afd8c..22a817917a 100644 --- a/plugins/modules/site_workflow_manager.py +++ b/plugins/modules/site_workflow_manager.py @@ -806,12 +806,15 @@ def get_diff_merged(self, config): else: # Creating New Site site_params = self.want.get("site_params") - if site_params['site']['building']: - building_details = {} - for key, value in site_params['site']['building'].items(): - if value is not None: - building_details[key] = value - site_params['site']['building'] = building_details + try: + if site_params['site']['building']: + building_details = {} + for key, value in site_params['site']['building'].items(): + if value is not None: + building_details[key] = value + site_params['site']['building'] = building_details + except Exception as e: + self.log("Given site is not of type building so no need 
to remove None keys from the site_params dictionary", "INFO") response = self.dnac._exec( family="sites", From b1ee68902a118017412764a219f03c62307dc1a9 Mon Sep 17 00:00:00 2001 From: Abhishek-121 Date: Wed, 27 Mar 2024 11:54:59 +0530 Subject: [PATCH 07/18] Enhance log message for area and floor site --- plugins/modules/site_intent.py | 5 ++++- plugins/modules/site_workflow_manager.py | 5 ++++- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/plugins/modules/site_intent.py b/plugins/modules/site_intent.py index 37a474affc..eb6c0feefa 100644 --- a/plugins/modules/site_intent.py +++ b/plugins/modules/site_intent.py @@ -815,7 +815,10 @@ def get_diff_merged(self, config): building_details[key] = value site_params['site']['building'] = building_details except Exception as e: - self.log("Given site is not of type building so no need to remove None keys from the site_params dictionary", "INFO") + site_type = site_params['type'] + site_name = site_params['site'][site_type]['name'] + self.log("""The site '{0}' is not categorized as a building; hence, there is no need to filter out 'None' + values from the 'site_params' dictionary.""".format(site_name), "INFO") response = self.dnac._exec( family="sites", diff --git a/plugins/modules/site_workflow_manager.py b/plugins/modules/site_workflow_manager.py index 22a817917a..1af1532b28 100644 --- a/plugins/modules/site_workflow_manager.py +++ b/plugins/modules/site_workflow_manager.py @@ -814,7 +814,10 @@ def get_diff_merged(self, config): building_details[key] = value site_params['site']['building'] = building_details except Exception as e: - self.log("Given site is not of type building so no need to remove None keys from the site_params dictionary", "INFO") + site_type = site_params['type'] + site_name = site_params['site'][site_type]['name'] + self.log("""The site '{0}' is not categorized as a building; hence, there is no need to filter out 'None' + values from the 'site_params' dictionary.""".format(site_name), "INFO") response = self.dnac._exec( family="sites", From 7c03ef7b295c5036bf794ad9694f2878d435431c Mon Sep 17 00:00:00 2001 From: MUTHU-RAKESH-27 <19cs127@psgitech.ac.in> Date: Wed, 27 Mar 2024 12:24:31 +0530 Subject: [PATCH 08/18] Added the op_modifies=True to all the SDK calls which take an input payload --- plugins/modules/device_credential_intent.py | 7 +++++-- .../modules/device_credential_workflow_manager.py | 7 +++++-- plugins/modules/network_settings_intent.py | 14 +++++++++++--- .../modules/network_settings_workflow_manager.py | 14 +++++++++++--- plugins/modules/template_intent.py | 12 +++++++++--- plugins/modules/template_workflow_manager.py | 6 +++++- 6 files changed, 46 insertions(+), 14 deletions(-) diff --git a/plugins/modules/device_credential_intent.py b/plugins/modules/device_credential_intent.py index 8e2f413843..ec8171d7e0 100644 --- a/plugins/modules/device_credential_intent.py +++ b/plugins/modules/device_credential_intent.py @@ -881,6 +881,7 @@ def get_site_id(self, site_name): response = self.dnac._exec( family="sites", function='get_site', + op_modifies=True, params={"name": site_name}, ) self.log("Received API response from 'get_site': {0}".format(response), "DEBUG") @@ -2209,6 +2210,7 @@ def create_device_credentials(self): response = self.dnac._exec( family="discovery", function='create_global_credentials_v2', + op_modifies=True, params=credential_params, ) self.log("Received API response from 'create_global_credentials_v2': {0}" @@ -2273,6 +2275,7 @@ def update_device_credentials(self): response = 
self.dnac._exec( family="discovery", function='update_global_credentials_v2', + op_modifies=True, params=credential_params, ) self.log("Received API response for 'update_global_credentials_v2': {0}" @@ -2328,6 +2331,7 @@ def assign_credentials_to_site(self): response = self.dnac._exec( family="network_settings", function='assign_device_credential_to_site_v2', + op_modifies=True, params=credential_params, ) self.log("Received API response for 'assign_device_credential_to_site_v2': {0}" @@ -2414,6 +2418,7 @@ def delete_device_credential(self, config): response = self.dnac._exec( family="discovery", function="delete_global_credential_v2", + op_modifies=True, params={"id": _id}, ) self.log("Received API response for 'delete_global_credential_v2': {0}" @@ -2583,8 +2588,6 @@ def main(): "dnac_log_file_path": {"type": 'str', "default": 'dnac.log'}, "dnac_log_append": {"type": 'bool', "default": True}, "config_verify": {"type": 'bool', "default": False}, - 'dnac_api_task_timeout': {'type': 'int', "default": 1200}, - 'dnac_task_poll_interval': {'type': 'int', "default": 2}, "config": {"type": 'list', "required": True, "elements": 'dict'}, "state": {"default": 'merged', "choices": ['merged', 'deleted']}, "validate_response_schema": {"type": 'bool', "default": True}, diff --git a/plugins/modules/device_credential_workflow_manager.py b/plugins/modules/device_credential_workflow_manager.py index 3db97ce05a..1a1dde590c 100644 --- a/plugins/modules/device_credential_workflow_manager.py +++ b/plugins/modules/device_credential_workflow_manager.py @@ -880,6 +880,7 @@ def get_site_id(self, site_name): response = self.dnac._exec( family="sites", function='get_site', + op_modifies=True, params={"name": site_name}, ) self.log("Received API response from 'get_site': {0}".format(response), "DEBUG") @@ -2208,6 +2209,7 @@ def create_device_credentials(self): response = self.dnac._exec( family="discovery", function='create_global_credentials_v2', + op_modifies=True, params=credential_params, ) self.log("Received API response from 'create_global_credentials_v2': {0}" @@ -2272,6 +2274,7 @@ def update_device_credentials(self): response = self.dnac._exec( family="discovery", function='update_global_credentials_v2', + op_modifies=True, params=credential_params, ) self.log("Received API response for 'update_global_credentials_v2': {0}" @@ -2327,6 +2330,7 @@ def assign_credentials_to_site(self): response = self.dnac._exec( family="network_settings", function='assign_device_credential_to_site_v2', + op_modifies=True, params=credential_params, ) self.log("Received API response for 'assign_device_credential_to_site_v2': {0}" @@ -2413,6 +2417,7 @@ def delete_device_credential(self, config): response = self.dnac._exec( family="discovery", function="delete_global_credential_v2", + op_modifies=True, params={"id": _id}, ) self.log("Received API response for 'delete_global_credential_v2': {0}" @@ -2582,8 +2587,6 @@ def main(): "dnac_log_file_path": {"type": 'str', "default": 'dnac.log'}, "dnac_log_append": {"type": 'bool', "default": True}, "config_verify": {"type": 'bool', "default": False}, - 'dnac_api_task_timeout': {'type': 'int', "default": 1200}, - 'dnac_task_poll_interval': {'type': 'int', "default": 2}, "config": {"type": 'list', "required": True, "elements": 'dict'}, "state": {"default": 'merged', "choices": ['merged', 'deleted']}, "validate_response_schema": {"type": 'bool', "default": True}, diff --git a/plugins/modules/network_settings_intent.py b/plugins/modules/network_settings_intent.py index 49d6fa5d47..88134e23c1 
100644 --- a/plugins/modules/network_settings_intent.py +++ b/plugins/modules/network_settings_intent.py @@ -705,6 +705,7 @@ def get_site_id(self, site_name): response = self.dnac._exec( family="sites", function='get_site', + op_modifies=True, params={"name": site_name}, ) self.log("Received API response from 'get_site': {0}".format(response), "DEBUG") @@ -856,6 +857,7 @@ def get_network_params(self, site_id): response = self.dnac._exec( family="network_settings", function='get_network_v2', + op_modifies=True, params={"site_id": site_id} ) self.log("Received API response from 'get_network_v2': {0}".format(response), "DEBUG") @@ -1061,7 +1063,8 @@ def reserve_pool_exists(self, name, site_name): response = self.dnac._exec( family="network_settings", function="get_reserve_ip_subpool", - params={"siteId": site_id} + op_modifies=True, + params={"site_id": site_id} ) if not isinstance(response, dict): reserve_pool.update({"success": False}) @@ -1748,6 +1751,7 @@ def update_global_pool(self, config): response = self.dnac._exec( family="network_settings", function="create_global_pool", + op_modifies=True, params=pool_params, ) self.check_execution_response_status(response).check_return_status() @@ -1793,6 +1797,7 @@ def update_global_pool(self, config): response = self.dnac._exec( family="network_settings", function="update_global_pool", + op_modifies=True, params=pool_params, ) @@ -1836,6 +1841,7 @@ def update_reserve_pool(self, config): response = self.dnac._exec( family="network_settings", function="reserve_ip_subpool", + op_modifies=True, params=reserve_params, ) self.check_execution_response_status(response).check_return_status() @@ -1868,6 +1874,7 @@ def update_reserve_pool(self, config): response = self.dnac._exec( family="network_settings", function="update_reserve_ip_subpool", + op_modifies=True, params=reserve_params, ) self.check_execution_response_status(response).check_return_status() @@ -1915,6 +1922,7 @@ def update_network(self, config): response = self.dnac._exec( family="network_settings", function='update_network_v2', + op_modifies=True, params=net_params, ) self.log("Received API response of 'update_network_v2': {0}".format(response), "DEBUG") @@ -1978,6 +1986,7 @@ def delete_reserve_pool(self, name): response = self.dnac._exec( family="network_settings", function="release_reserve_ip_subpool", + op_modifies=True, params={"id": _id}, ) self.check_execution_response_status(response).check_return_status() @@ -2014,6 +2023,7 @@ def delete_global_pool(self, name): response = self.dnac._exec( family="network_settings", function="delete_global_ip_pool", + op_modifies=True, params={"id": self.have.get("globalPool").get("id")}, ) @@ -2190,8 +2200,6 @@ def main(): "dnac_log_file_path": {"type": 'str', "default": 'dnac.log'}, "dnac_log_append": {"type": 'bool', "default": True}, "config_verify": {"type": 'bool', "default": False}, - "dnac_api_task_timeout": {"type": 'int', "default": 1200}, - "dnac_task_poll_interval": {"type": 'int', "default": 2}, "config": {"type": 'list', "required": True, "elements": 'dict'}, "state": {"default": 'merged', "choices": ['merged', 'deleted']}, "validate_response_schema": {"type": 'bool', "default": True}, diff --git a/plugins/modules/network_settings_workflow_manager.py b/plugins/modules/network_settings_workflow_manager.py index bbae364635..524d236c5b 100644 --- a/plugins/modules/network_settings_workflow_manager.py +++ b/plugins/modules/network_settings_workflow_manager.py @@ -701,6 +701,7 @@ def get_site_id(self, site_name): response = 
self.dnac._exec( family="sites", function='get_site', + op_modifies=True, params={"name": site_name}, ) self.log("Received API response from 'get_site': {0}".format(response), "DEBUG") @@ -852,6 +853,7 @@ def get_network_params(self, site_id): response = self.dnac._exec( family="network_settings", function='get_network_v2', + op_modifies=True, params={"site_id": site_id} ) self.log("Received API response from 'get_network_v2': {0}".format(response), "DEBUG") @@ -1057,7 +1059,8 @@ def reserve_pool_exists(self, name, site_name): response = self.dnac._exec( family="network_settings", function="get_reserve_ip_subpool", - params={"siteId": site_id} + op_modifies=True, + params={"site_id": site_id} ) if not isinstance(response, dict): reserve_pool.update({"success": False}) @@ -1733,6 +1736,7 @@ def update_global_pool(self, config): response = self.dnac._exec( family="network_settings", function="create_global_pool", + op_modifies=True, params=pool_params, ) self.check_execution_response_status(response).check_return_status() @@ -1778,6 +1782,7 @@ def update_global_pool(self, config): response = self.dnac._exec( family="network_settings", function="update_global_pool", + op_modifies=True, params=pool_params, ) @@ -1821,6 +1826,7 @@ def update_reserve_pool(self, config): response = self.dnac._exec( family="network_settings", function="reserve_ip_subpool", + op_modifies=True, params=reserve_params, ) self.check_execution_response_status(response).check_return_status() @@ -1853,6 +1859,7 @@ def update_reserve_pool(self, config): response = self.dnac._exec( family="network_settings", function="update_reserve_ip_subpool", + op_modifies=True, params=reserve_params, ) self.check_execution_response_status(response).check_return_status() @@ -1900,6 +1907,7 @@ def update_network(self, config): response = self.dnac._exec( family="network_settings", function='update_network_v2', + op_modifies=True, params=net_params, ) self.log("Received API response of 'update_network_v2': {0}".format(response), "DEBUG") @@ -1963,6 +1971,7 @@ def delete_reserve_pool(self, name): response = self.dnac._exec( family="network_settings", function="release_reserve_ip_subpool", + op_modifies=True, params={"id": _id}, ) self.check_execution_response_status(response).check_return_status() @@ -1999,6 +2008,7 @@ def delete_global_pool(self, name): response = self.dnac._exec( family="network_settings", function="delete_global_ip_pool", + op_modifies=True, params={"id": self.have.get("globalPool").get("id")}, ) @@ -2175,8 +2185,6 @@ def main(): "dnac_log_file_path": {"type": 'str', "default": 'dnac.log'}, "dnac_log_append": {"type": 'bool', "default": True}, "config_verify": {"type": 'bool', "default": False}, - "dnac_api_task_timeout": {"type": 'int', "default": 1200}, - "dnac_task_poll_interval": {"type": 'int', "default": 2}, "config": {"type": 'list', "required": True, "elements": 'dict'}, "state": {"default": 'merged', "choices": ['merged', 'deleted']}, "validate_response_schema": {"type": 'bool', "default": True}, diff --git a/plugins/modules/template_intent.py b/plugins/modules/template_intent.py index c6e3042de0..31243cfdce 100644 --- a/plugins/modules/template_intent.py +++ b/plugins/modules/template_intent.py @@ -1896,6 +1896,7 @@ def get_template(self, config): items = self.dnac_apply['exec']( family="configuration_templates", function="get_template_details", + op_modifies=True, params={"template_id": config.get("templateId")} ) if items: @@ -1980,6 +1981,7 @@ def get_have_template(self, config, template_available): 
template_list = self.dnac_apply['exec']( family="configuration_templates", function="gets_the_templates_available", + op_modifies=True, params={"projectNames": config.get("projectName")}, ) have_template["isCommitPending"] = True @@ -2371,8 +2373,8 @@ def update_configuration_templates(self, config): response = self.dnac_apply['exec']( family="configuration_templates", function="update_template", - params=template_params, op_modifies=True, + params=template_params, ) template_updated = True self.log("Updating existing template '{0}'." @@ -2442,6 +2444,7 @@ def handle_export(self, config): response = self.dnac._exec( family="configuration_templates", function='export_projects', + op_modifies=True, params={"payload": export_project}, ) validation_string = "successfully exported project" @@ -2458,6 +2461,7 @@ def handle_export(self, config): response = self.dnac._exec( family="configuration_templates", function='export_templates', + op_modifies=True, params={"payload": self.export_template}, ) validation_string = "successfully exported template" @@ -2501,6 +2505,7 @@ def handle_import(self, config): response = self.dnac._exec( family="configuration_templates", function='imports_the_projects_provided', + op_modifies=True, params=_import_project, ) validation_string = "successfully imported project" @@ -2529,6 +2534,7 @@ def handle_import(self, config): response = self.dnac._exec( family="configuration_templates", function='imports_the_templates_provided', + op_modifies=True, params=import_template, ) validation_string = "successfully imported template" @@ -2592,6 +2598,7 @@ def delete_project_or_template(self, config, is_delete_project=False): response = self.dnac_apply['exec']( family="configuration_templates", function=deletion_value, + op_modifies=True, params=params_key, ) task_id = response.get("response").get("taskId") @@ -2716,6 +2723,7 @@ def verify_diff_deleted(self, config): template_list = self.dnac_apply['exec']( family="configuration_templates", function="gets_the_templates_available", + op_modifies=True, params={"projectNames": config.get("projectName")}, ) if template_list and isinstance(template_list, list): @@ -2767,8 +2775,6 @@ def main(): "dnac_log_append": {"type": 'bool', "default": True}, 'validate_response_schema': {'type': 'bool', 'default': True}, "config_verify": {"type": 'bool', "default": False}, - 'dnac_api_task_timeout': {'type': 'int', "default": 1200}, - 'dnac_task_poll_interval': {'type': 'int', "default": 2}, 'config': {'required': True, 'type': 'list', 'elements': 'dict'}, 'state': {'default': 'merged', 'choices': ['merged', 'deleted']} } diff --git a/plugins/modules/template_workflow_manager.py b/plugins/modules/template_workflow_manager.py index ab6aacd889..e47199ed5a 100644 --- a/plugins/modules/template_workflow_manager.py +++ b/plugins/modules/template_workflow_manager.py @@ -1954,6 +1954,7 @@ def get_template(self, config): items = self.dnac_apply['exec']( family="configuration_templates", function="get_template_details", + op_modifies=True, params={"template_id": config.get("templateId")} ) if items: @@ -2038,6 +2039,7 @@ def get_have_template(self, config, template_available): template_list = self.dnac_apply['exec']( family="configuration_templates", function="gets_the_templates_available", + op_modifies=True, params={"projectNames": config.get("projectName")}, ) have_template["isCommitPending"] = True @@ -2437,8 +2439,8 @@ def update_configuration_templates(self, configuration_templates): response = self.dnac_apply['exec']( 
family="configuration_templates", function="update_template", - params=template_params, op_modifies=True, + params=template_params, ) template_updated = True self.log("Updating existing template '{0}'." @@ -2685,6 +2687,7 @@ def delete_project_or_template(self, config, is_delete_project=False): response = self.dnac_apply['exec']( family="configuration_templates", function=deletion_value, + op_modifies=True, params=params_key, ) task_id = response.get("response").get("taskId") @@ -2809,6 +2812,7 @@ def verify_diff_deleted(self, config): template_list = self.dnac_apply['exec']( family="configuration_templates", function="gets_the_templates_available", + op_modifies=True, params={"projectNames": config.get("projectName")}, ) if template_list and isinstance(template_list, list): From bd0047417a527da99aa5d1df7ca73d6fb2d43522 Mon Sep 17 00:00:00 2001 From: MUTHU-RAKESH-27 <19cs127@psgitech.ac.in> Date: Wed, 27 Mar 2024 12:44:27 +0530 Subject: [PATCH 09/18] Resolved the sanity errors --- plugins/modules/device_credential_intent.py | 2 + .../device_credential_workflow_manager.py | 2 + ...ise_radius_integration_workflow_manager.py | 1340 +++++++++++++++++ plugins/modules/network_settings_intent.py | 4 +- .../network_settings_workflow_manager.py | 4 +- plugins/modules/template_intent.py | 2 + 6 files changed, 1352 insertions(+), 2 deletions(-) create mode 100644 plugins/modules/ise_radius_integration_workflow_manager.py diff --git a/plugins/modules/device_credential_intent.py b/plugins/modules/device_credential_intent.py index ec8171d7e0..99b56df6ec 100644 --- a/plugins/modules/device_credential_intent.py +++ b/plugins/modules/device_credential_intent.py @@ -2588,6 +2588,8 @@ def main(): "dnac_log_file_path": {"type": 'str', "default": 'dnac.log'}, "dnac_log_append": {"type": 'bool', "default": True}, "config_verify": {"type": 'bool', "default": False}, + 'dnac_api_task_timeout': {'type': 'int', "default": 1200}, + 'dnac_task_poll_interval': {'type': 'int', "default": 2}, "config": {"type": 'list', "required": True, "elements": 'dict'}, "state": {"default": 'merged', "choices": ['merged', 'deleted']}, "validate_response_schema": {"type": 'bool', "default": True}, diff --git a/plugins/modules/device_credential_workflow_manager.py b/plugins/modules/device_credential_workflow_manager.py index 1a1dde590c..f74aded0d2 100644 --- a/plugins/modules/device_credential_workflow_manager.py +++ b/plugins/modules/device_credential_workflow_manager.py @@ -2587,6 +2587,8 @@ def main(): "dnac_log_file_path": {"type": 'str', "default": 'dnac.log'}, "dnac_log_append": {"type": 'bool', "default": True}, "config_verify": {"type": 'bool', "default": False}, + 'dnac_api_task_timeout': {'type': 'int', "default": 1200}, + 'dnac_task_poll_interval': {'type': 'int', "default": 2}, "config": {"type": 'list', "required": True, "elements": 'dict'}, "state": {"default": 'merged', "choices": ['merged', 'deleted']}, "validate_response_schema": {"type": 'bool', "default": True}, diff --git a/plugins/modules/ise_radius_integration_workflow_manager.py b/plugins/modules/ise_radius_integration_workflow_manager.py new file mode 100644 index 0000000000..df33461927 --- /dev/null +++ b/plugins/modules/ise_radius_integration_workflow_manager.py @@ -0,0 +1,1340 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- +# Copyright (c) 2024, Cisco Systems +# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) + +"""Ansible module to operate the Authentication and Policy Servers in Cisco Catalyst Center.""" +from 
__future__ import absolute_import, division, print_function + +__metaclass__ = type +__author__ = ['Muthu Rakesh, Madhan Sankaranarayanan'] + +DOCUMENTATION = r""" +--- +module: ise_radius_integration_workflow_manager +short_description: Resource module for Authentication and Policy Servers +description: +- Manage operations on Authentication and Policy Servers. +- API to create Authentication and Policy Server Access Configuration. +- API to update Authentication and Policy Server Access Configuration. +- API to delete Authentication and Policy Server Access Configuration. +version_added: '6.13.0' +extends_documentation_fragment: + - cisco.dnac.workflow_manager_params +author: Muthu Rakesh (@MUTHU-RAKESH-27) + Madhan Sankaranarayanan (@madhansansel) +options: + config_verify: + description: Set to True to verify the Cisco Catalyst Center after applying the playbook config. + type: bool + default: False + state: + description: The state of Cisco Catalyst Center after module completion. + type: str + choices: [ merged, deleted ] + default: merged + config: + description: + - List of details of Authentication and Policy Servers being managed. + type: list + elements: dict + required: true + suboptions: + authentication_policy_server: + description: Manages the Authentication and Policy Servers. + type: dict + suboptions: + server_type: + description: + - Type of the Authentication and Policy Server. + - ISE for Cisco ISE servers. + - AAA for Non-Cisco ISE servers. + type: str + choices: [ AAA, ISE ] + default: AAA + server_ip_address: + description: Ip Address of the Authentication and Policy Server. + type: str + required: True + shared_secret: + description: + - Shared secret between devices and authentication and policy server. + - Shared secret key length should be from 4 to 10. + type: str + protocol: + description: + - Type of protocol for authentication and policy server. + - RADIUS provides centralized services (AAA) for users in remote access scenarios. + - TACACS focuses on access control and administrative authentication for network devices. + type: str + choices: [ TACACS, RADIUS, RADIUS_TACACS] + default: TACACS + encryption_scheme: + description: + - Type of encryption scheme for additional security. + - > + KEYWRAP is used for securely wrapping and unwrapping encryption keys, + ensuring their confidentiality during transmission or storage. + - > + RADSEC is an extension of RADIUS that provides secure communication + between RADIUS clients and servers over TLS/SSL. Enhances enhancing the + confidentiality and integrity of authentication and accounting data exchange. + type: str + choices: [KEYWRAP, RADSEC] + message_key: + description: + - Message key used to encrypt shared secret. + - Required when encryption_scheme is provided. + - > + When ASCII format is selected, Message Authentication Code Key may contain + alphanumeric and special characters. Key must be 20 char long. + type: str + encryption_key: + description: + - Encryption key used to encrypt shared secret. + - Required when encryption_scheme is provided. + - > + When ASCII format is selected, Encryption Key may contain + alphanumeric and special characters. Key must be 16 char long. + type: str + authentication_port: + description: + - Authentication port of RADIUS server. + - Authentication port should be from 1 to 65535. + type: str + default: "1812" + accounting_port: + description: + - Accounting port of RADIUS server. + - Accounting port should be from 1 to 65535. 
+ type: str + default: "1813" + port: + description: + - Port of TACACS server. + - Port should be from 1 to 65535. + type: str + default: "49" + retries: + description: + - Number of communication retries between devices and authentication and policy server. + - Retries should be from 1 to 3. + type: str + default: "3" + timeout: + description: + - Number of seconds before timing out between devices and authentication and policy server. + - Timeout should be from 2 to 20. + type: str + default: "4" + role: + description: Role of authentication and policy server. + type: str + default: secondary + pxgrid_enabled: + description: + - Set True to enable the Pxgrid and False to disable the Pxgrid. + - Pxgrid is available only for the Cisco ISE Servers. + - > + PxGrid facilitates seamless integration and information sharing across products, + enhancing threat detection and response capabilities within the network ecosystem. + type: bool + default: True + use_dnac_cert_for_pxgrid: + description: Set True to use the Cisco Catalyst Center certificate for the Pxgrid. + type: bool + default: False + cisco_ise_dtos: + description: + - List of Cisco ISE Data Transfer Objects (DTOs). + - Required when server_type is set to ISE. + type: list + elements: dict + suboptions: + user_name: + description: + - User name of the Cisco ISE server. + - Required for passing the cisco_ise_dtos. + type: str + password: + description: + - Password of the Cisco ISE server. + - Required for passing the cisco_ise_dtos. + type: str + fqdn: + description: + - Fully-qualified domain name of the Cisco ISE server. + - Required for passing the cisco_ise_dtos. + type: str + ip_address: + description: + - IP Address of the Cisco ISE Server. + - Required for passing the cisco_ise_dtos. + type: str + subscriber_name: + description: + - Subscriber name of the Cisco ISE server. + - Required for passing the cisco_ise_dtos. + type: str + description: + description: Description about the Cisco ISE server. + type: str + ssh_key: + description: SSH key of the Cisco ISE server. + type: str + external_cisco_ise_ip_addr_dtos: + description: External Cisco ISE Ip address data transfer objects for future use. + type: list + elements: dict + suboptions: + external_cisco_ise_ip_addresses: + description: External Cisco ISE Ip addresses. + type: list + elements: dict + suboptions: + external_ip_address: + description: External Cisco ISE Ip address. + type: str + ise_type: + description: Type of the Authentication and Policy Server. + type: str +requirements: +- dnacentersdk == 2.7.0 +- python >= 3.5 +notes: + - SDK Method used are + system_settings.SystemSettings.add_authentication_and_policy_server_access_configuration, + system_settings.SystemSettings.edit_authentication_and_policy_server_access_configuration, + system_settings.SystemSettings.accept_cisco_ise_server_certificate_for_cisco_ise_server_integration, + system_settings.SystemSettings.delete_authentication_and_policy_server_access_configuration, + + - Paths used are + post /dna/intent/api/v1/authentication-policy-servers, + put /dna/intent/api/v1/authentication-policy-servers/${id}, + put /dna/intent/api/v1/integrate-ise/${id}, + delete /dna/intent/api/v1/authentication-policy-servers/${id} + +""" + +EXAMPLES = r""" +- name: Create an AAA server. 
+ cisco.dnac.ise_radius_integration_workflow_manager: + dnac_host: "{{dnac_host}}" + dnac_username: "{{dnac_username}}" + dnac_password: "{{dnac_password}}" + dnac_verify: "{{dnac_verify}}" + dnac_port: "{{dnac_port}}" + dnac_version: "{{dnac_version}}" + dnac_debug: "{{dnac_debug}}" + dnac_log: True + dnac_log_level: "{{ dnac_log_level }}" + state: merged + config_verify: True + config: + - authentication_policy_server: + server_type: string + server_ip_address: string + shared_secret: string + protocol: string + encryption_scheme: string + message_key: string + encryption_key: string + authentication_port: string + accounting_port: string + port: string + retries: string + timeout: string + role: string + +- name: Create an Cisco ISE server. + cisco.dnac.ise_radius_integration_workflow_manager: + dnac_host: "{{dnac_host}}" + dnac_username: "{{dnac_username}}" + dnac_password: "{{dnac_password}}" + dnac_verify: "{{dnac_verify}}" + dnac_port: "{{dnac_port}}" + dnac_version: "{{dnac_version}}" + dnac_debug: "{{dnac_debug}}" + dnac_log: True + dnac_log_level: "{{ dnac_log_level }}" + state: merged + config_verify: True + config: + - authentication_policy_server: + server_type: string + server_ip_address: string + shared_secret: string + protocol: string + encryption_scheme: string + message_key: string + encryption_key: string + authentication_port: string + accounting_port: string + port: string + retries: string + timeout: string + role: string + use_dnac_cert_for_pxgrid: False + pxgrid_enabled: True + cisco_ise_dtos: + - user_name: string + password: string + fqdn: string + ip_address: string + subscriber_name: string + description: string + +- name: Delete an Authentication and Policy server. + cisco.dnac.ise_radius_integration_workflow_manager: + dnac_host: "{{dnac_host}}" + dnac_username: "{{dnac_username}}" + dnac_password: "{{dnac_password}}" + dnac_verify: "{{dnac_verify}}" + dnac_port: "{{dnac_port}}" + dnac_version: "{{dnac_version}}" + dnac_debug: "{{dnac_debug}}" + dnac_log: True + dnac_log_level: "{{ dnac_log_level }}" + state: merged + config_verify: True + config: + - authentication_policy_server: + server_ip_address: string +""" + +RETURN = r""" +# Case_1: Successful creation of Authentication and Policy Server. +response_1: + description: A dictionary or list with the response returned by the Cisco Catalyst Center Python SDK + returned: always + type: dict + sample: > + { + "response": { + "taskId": "string", + "url": "string" + }, + "version": "string" + } + +# Case_2: Successful updation of Authentication and Policy Server. 
+response_2: + description: A dictionary or list with the response returned by the Cisco Catalyst Center Python SDK + returned: always + type: dict + sample: > + { + "response": { + "taskId": "string", + "url": "string" + }, + "version": "string" + } + +# Case_3: Successful creation/updation of network +response_3: + description: A dictionary or list with the response returned by the Cisco Catalyst Center Python SDK + returned: always + type: dict + sample: > + { + "response": { + "taskId": "string", + "url": "string" + }, + "version": "string" + } +""" + +import copy +from ansible.module_utils.basic import AnsibleModule +from ansible_collections.cisco.dnac.plugins.module_utils.dnac import ( + DnacBase, + validate_list_of_dicts, + get_dict_result, + dnac_compare_equality, +) + + +class IseRadiusIntegration(DnacBase): + """Class containing member attributes for ise_radius_integration_workflow_manager module""" + + def __init__(self, module): + super().__init__(module) + self.result["response"] = [ + {"authenticationPolicyServer": {"response": {}, "msg": {}}} + ] + self.authentication_policy_server_obj_params = \ + self.get_obj_params("authenticationPolicyServer") + + def validate_input(self): + """ + Checks if the configuration parameters provided in the playbook + meet the expected structure and data types, + as defined in the 'temp_spec' dictionary. + + Parameters: + None + + Returns: + self + + """ + + if not self.config: + self.msg = "config not available in playbook for validation" + self.status = "success" + return self + + # temp_spec is the specification for the expected structure of configuration parameters + temp_spec = { + "authentication_policy_server": { + "type": "dict", + "server_type": {"type": 'string', "choices": ["AAA", "ISE"]}, + "server_ip_address": {"type": 'string'}, + "shared_secret": {"type": 'string'}, + "protocol": {"type": 'string', "choices": ["TACACS", "RADIUS", "RADIUS_TACACS"]}, + "encryption_scheme": {"type": 'string'}, + "message_key": {"type": 'string'}, + "encryption_key": {"type": 'string'}, + "authentication_port": {"type": 'string'}, + "accounting_port": {"type": 'string'}, + "port": {"type": 'string'}, + "retries": {"type": 'string'}, + "timeout": {"type": 'string'}, + "role": {"type": 'string'}, + "pxgrid_enabled": {"type": 'bool'}, + "use_dnac_cert_for_pxgrid": {"type": 'bool'}, + "cisco_ise_dtos": { + "type": 'list', + "user_name": {"type": 'string'}, + "password": {"type": 'string'}, + "fqdn": {"type": 'string'}, + "ip_address": {"type": 'string'}, + "subscriber_name": {"type": 'string'}, + "description": {"type": 'string'}, + "ssh_key": {"type": 'string'}, + }, + "external_cisco_ise_ip_addr_dtos": { + "type": 'list', + "external_cisco_ise_ip_addresses": { + "type": 'list', + "external_ip_address": {"type": 'string'}, + }, + "ise_type": {"type": 'string'}, + } + } + } + + # Validate playbook params against the specification (temp_spec) + valid_temp, invalid_params = validate_list_of_dicts(self.config, temp_spec) + if invalid_params: + self.msg = "Invalid parameters in playbook: {0}".format("\n".join(invalid_params)) + self.status = "failed" + return self + + self.validated_config = valid_temp + self.log("Successfully validated playbook config params: {0}".format(valid_temp), "INFO") + self.msg = "Successfully validated input from the playbook" + self.status = "success" + return self + + def requires_update(self, have, want, obj_params): + """ + Check if the template config given requires update by comparing + current information wih the requested 
information. + + This method compares the current global pool, reserve pool, + or network details from Cisco Catalyst Center with the user-provided details + from the playbook, using a specified schema for comparison. + + Parameters: + have (dict) - Current information from the Cisco Catalyst Center + (global pool, reserve pool, network details) + want (dict) - Users provided information from the playbook + obj_params (list of tuples) - A list of parameter mappings specifying which + Cisco Catalyst Center parameters (dnac_param) + correspond to the user-provided + parameters (ansible_param). + + Returns: + bool - True if any parameter specified in obj_params differs between + current_obj and requested_obj, indicating that an update is required. + False if all specified parameters are equal. + + """ + + current_obj = have + requested_obj = want + self.log("Current State (have): {0}".format(current_obj), "DEBUG") + self.log("Desired State (want): {0}".format(requested_obj), "DEBUG") + + return any(not dnac_compare_equality(current_obj.get(dnac_param), + requested_obj.get(ansible_param)) + for (dnac_param, ansible_param) in obj_params) + + def get_obj_params(self, get_object): + """ + Get the required comparison obj_params value + + Parameters: + get_object (str) - identifier for the required obj_params + + Returns: + obj_params (list) - obj_params value for comparison. + """ + + try: + obj_params = [] + if get_object == "authenticationPolicyServer": + obj_params = [ + ("authenticationPort", "authenticationPort"), + ("accountingPort", "accountingPort"), + ("ciscoIseDtos", "ciscoIseDtos"), + ("ipAddress", "ipAddress"), + ("pxgridEnabled", "pxgridEnabled"), + ("useDnacCertForPxgrid", "useDnacCertForPxgrid"), + ("isIseEnabled", "isIseEnabled"), + ("port", "port"), + ("protocol", "protocol"), + ("retries", "retries"), + ("role", "role"), + ("sharedSecret", "sharedSecret"), + ("timeoutSeconds", "timeoutSeconds"), + ("encryptionScheme", "encryptionScheme"), + ("messageKey", "messageKey"), + ("encryptionKey", "encryptionKey"), + ("externalCiscoIseIpAddrDtos", "externalCiscoIseIpAddrDtos") + ] + else: + raise ValueError("Received an unexpected value for 'get_object': {0}" + .format(get_object)) + except Exception as msg: + self.log("Received exception: {0}".format(msg), "CRITICAL") + + return obj_params + + def get_auth_server_params(self, auth_server_info): + """ + Process Authentication and Policy Server params from playbook data for + Authentication and Policy Server config in Cisco Catalyst Center. + + Parameters: + auth_server_info (dict) - Cisco Catalyst Center data containing + information about the Authentication and Policy Server. + + Returns: + dict or None - Processed Authentication and Policy Server data in a format suitable + for Cisco Catalyst Center configuration, or None if auth_server_info is empty. 
+ """ + + if not auth_server_info: + self.log("Authentication and Policy Server data is empty", "INFO") + return None + + self.log("Authentication and Policy Server Details: {0}".format(auth_server_info), "DEBUG") + auth_server = { + "authenticationPort": auth_server_info.get("authenticationPort"), + "accountingPort": auth_server_info.get("accountingPort"), + "isIseEnabled": auth_server_info.get("iseEnabled"), + "ipAddress": auth_server_info.get("ipAddress"), + "pxgridEnabled": auth_server_info.get("pxgridEnabled"), + "useDnacCertForPxgrid": auth_server_info.get("useDnacCertForPxgrid"), + "port": auth_server_info.get("port"), + "protocol": auth_server_info.get("protocol"), + "retries": str(auth_server_info.get("retries")), + "role": auth_server_info.get("role"), + "timeoutSeconds": str(auth_server_info.get("timeoutSeconds")), + "encryptionScheme": auth_server_info.get("encryptionScheme") + } + self.log("Formated Authentication and Policy Server details: {0}" + .format(auth_server), "DEBUG") + if auth_server.get("isIseEnabled") is True: + auth_server_ise_info = auth_server_info.get("ciscoIseDtos") + auth_server.update({"ciscoIseDtos": []}) + for ise_credential in auth_server_ise_info: + auth_server.get("ciscoIseDtos").append({ + "userName": ise_credential.get("userName"), + "fqdn": ise_credential.get("fqdn"), + "ipAddress": ise_credential.get("ipAddress"), + "subscriberName": ise_credential.get("subscriberName"), + "description": ise_credential.get("description") + }) + + return auth_server + + def auth_server_exists(self, ipAddress): + """ + Check if the Authentication and Policy Server with the given ipAddress exists + + Parameters: + ipAddress (str) - The ipAddress of the Authentication and + Policy Server to check for existence. + + Returns: + dict - A dictionary containing information about the + Authentication and Policy Server's existence: + - 'exists' (bool): True if the Authentication and Policy Server exists, False otherwise. + - 'id' (str or None): The ID of the Authentication and Policy Server if it exists + or None if it doesn't. + - 'details' (dict or None): Details of the Authentication and Policy Server if it exists + else None. + """ + + AuthServer = { + "exists": False, + "details": None, + "id": None + } + response = self.dnac._exec( + family="system_settings", + function='get_authentication_and_policy_servers', + ) + if not isinstance(response, dict): + self.log("Failed to retrieve the Authentication and Policy Server details - " + "Response is not a dictionary", "CRITICAL") + return AuthServer + + all_auth_server_details = response.get("response") + auth_server_details = get_dict_result(all_auth_server_details, "ipAddress", ipAddress) + self.log("Authentication and Policy Server Ip Address: {0}" + .format(ipAddress), "DEBUG") + self.log("Authentication and Policy Server details: {0}" + .format(auth_server_details), "DEBUG") + if not auth_server_details: + self.log("Global pool {0} does not exist".format(ipAddress), "INFO") + return AuthServer + AuthServer.update({"exists": True}) + AuthServer.update({"id": auth_server_details.get("instanceUuid")}) + AuthServer["details"] = self.get_auth_server_params(auth_server_details) + + self.log("Formatted global pool details: {0}".format(AuthServer), "DEBUG") + return AuthServer + + def get_have_authentication_policy_server(self, config): + """ + Get the current Authentication and Policy Server information from + Cisco Catalyst Center based on the provided playbook details. + check this API using check_return_status. 
+
+ Parameters:
+ config (dict) - Playbook details containing
+ Authentication and Policy Server configuration.
+
+ Returns:
+ self - The current object with updated
+ Authentication and Policy Server information.
+ """
+
+ AuthServer = {
+ "exists": False,
+ "details": None,
+ "id": None
+ }
+ authentication_policy_server = config.get("authentication_policy_server")
+ if authentication_policy_server is None:
+ self.msg = "authentication_policy_server in config is missing in the playbook"
+ self.status = "failed"
+ return self
+
+ ipAddress = authentication_policy_server.get("server_ip_address")
+ if ipAddress is None:
+ self.msg = "Mandatory parameter server_ip_address required"
+ self.status = "failed"
+ return self
+
+ AuthServer = self.auth_server_exists(ipAddress)
+ self.log("Authentication and Policy Server exists: {0}"
+ .format(AuthServer.get("exists")), "DEBUG")
+ self.log("Authentication and Policy Server details: {0}"
+ .format(AuthServer.get("details")), "DEBUG")
+ self.log("Authentication and Policy Server Id: {0}"
+ .format(AuthServer.get("id")), "DEBUG")
+ self.have.update({"authenticationPolicyServer": AuthServer})
+ self.msg = "Collecting the Authentication and Policy Server " + \
+ "details from the Cisco Catalyst Center."
+ self.status = "success"
+ return self
+
+ def get_have(self, config):
+ """
+ Get the current Authentication and Policy Server details from Cisco Catalyst Center
+
+ Parameters:
+ config (dict) - Playbook details containing
+ Authentication and Policy Server configuration.
+
+ Returns:
+ self - The current object with updated
+ Authentication and Policy Server information.
+ """
+
+ if config.get("authentication_policy_server") is not None:
+ self.get_have_authentication_policy_server(config).check_return_status()
+
+ self.log("Current State (have): {0}".format(self.have), "INFO")
+ self.msg = "Successfully retrieved the details from the Cisco Catalyst Center"
+ self.status = "success"
+ return self
+
+ def get_want_authentication_policy_server(self, auth_policy_server):
+ """
+ Get all the Authentication Policy Server information from the playbook.
+ Set the status and the msg before returning from the API.
+ Check the return value of the API with check_return_status().
+
+ Parameters:
+ auth_policy_server (dict) - Playbook authentication policy server details
+ containing IpAddress, authentication port, accounting port, Cisco ISE Details,
+ protocol, port, retries, role, timeout seconds, encryption details.
+
+ Returns:
+ self - The current object with updated desired Authentication Policy Server information.
+ """
+
+ auth_server = {}
+ auth_server_exists = self.have.get("authenticationPolicyServer").get("exists")
+ server_type = auth_policy_server.get("server_type")
+ if server_type not in ["ISE", "AAA", None]:
+ self.msg = "server_type should either be ISE or AAA but not {0}.".format(server_type)
+ self.status = "failed"
+ return self
+
+ if server_type == "ISE":
+ auth_server.update({"isIseEnabled": True})
+ else:
+ auth_server.update({"isIseEnabled": False})
+
+ auth_server.update({"ipAddress": auth_policy_server.get("server_ip_address")})
+
+ shared_secret = auth_policy_server.get("shared_secret")
+ if not (shared_secret or auth_server_exists):
+ self.msg = "shared_secret is a mandatory parameter"
+ self.status = "failed"
+ return self
+
+ if not (4 <= len(shared_secret) <= 10) or shared_secret.isspace():
+ self.msg = "shared_secret length should be from 4 to 10 and it should not be blank."
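+ # The check above rejects shared secrets shorter than 4 or longer than 10
+ # characters, as well as values made up only of whitespace; an assumed
+ # example value such as shared_secret: "cisco123" would pass, while " "
+ # would fail.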
+ self.status = "failed" + return self + + auth_server.update({"sharedSecret": shared_secret}) + + protocol = auth_policy_server.get("protocol") + if protocol not in ["RADIUS", "TACACS", "RADIUS_TACACS", None]: + self.msg = "protocol should either be ['RADIUS', 'TACACS', 'RADIUS_TACACS']." + \ + "It should not be {0}".format(protocol) + self.status = "failed" + return self + + if protocol is not None: + auth_server.update({"protocol": protocol}) + else: + auth_server.update({"protocol": "RADIUS"}) + + encryption_scheme = str(auth_policy_server.get("encryption_scheme")) + if encryption_scheme not in ["KEYWRAP", "RADSEC", None]: + self.msg = "encryption_scheme should be in ['KEYWRAP', 'RADSEC']. " + \ + "It should not be {0}.".format(encryption_scheme) + self.status = "failed" + return self + + if encryption_scheme: + auth_server.update({"encryptionScheme": encryption_scheme}) + + if encryption_scheme == "KEYWRAP": + message_key = str(auth_policy_server.get("message_key")) + if not message_key: + self.msg = "message_key should not be empty if encryption_scheme is 'KEYWRAP'." + self.status = "failed" + return self + + if len(message_key) != 20: + self.msg = "message_key should be exactly 20 character." + self.status = "failed" + return self + + auth_server.update({"messageKey": message_key}) + + encryption_key = auth_policy_server.get("encryption_key") + if not encryption_key: + self.msg = "encryption_key should not be empty if encryption_scheme is 'KEYWRAP'." + self.status = "failed" + return self + + if len(encryption_key) != 16: + self.msg = "encryption_key should be exactly 16 characters." + self.status = "failed" + return self + + auth_server.update({"encryptionKey": encryption_key}) + + authentication_port = int(auth_policy_server.get("authentication_port")) + if not 1 <= int(authentication_port) <= 65535: + self.msg = "authentication_port should be from 1 to 65535." + self.status = "failed" + return self + + if authentication_port: + auth_server.update({"authenticationPort": authentication_port}) + else: + auth_server.update({"authenticationPort": "1812"}) + + accounting_port = int(auth_policy_server.get("accounting_port")) + if not 1 <= int(accounting_port) <= 65535: + self.msg = "accounting_port should be from 1 to 65535." + self.status = "failed" + return self + + if accounting_port: + auth_server.update({"accountingPort": accounting_port}) + else: + auth_server.update({"accountingPort": "1813"}) + + port = int(auth_policy_server.get("port")) + if port: + auth_server.update({"port": port}) + else: + auth_server.update({"port": "49"}) + + retries = str(auth_policy_server.get("retries")) + if not retries.isdigit(): + self.msg = "retries should contain only from 0-9." + self.status = "failed" + return self + + if not 1 <= int(retries) <= 3: + self.msg = "retries should be from 1 to 3." + self.status = "failed" + return self + + if retries: + auth_server.update({"retries": retries}) + else: + auth_server.update({"retries": "3"}) + + timeout = str(auth_policy_server.get("timeout")) + if not timeout.isdigit(): + self.msg = "timeout should contain only from 0-9." + self.status = "failed" + return self + + if not 2 <= int(timeout) <= 20: + self.msg = "timeout should be from 2 to 20." 
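+ # As a reference for the KEYWRAP branch above: with assumed values, a valid
+ # pair would be a message_key of exactly 20 characters (e.g. "abcdefghij0123456789")
+ # and an encryption_key of exactly 16 characters (e.g. "abcdefgh01234567").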
+ self.status = "failed" + return self + + if timeout: + auth_server.update({"timeoutSeconds": timeout}) + else: + auth_server.update({"timeoutSeconds": "4"}) + + role = auth_policy_server.get("role") + if role: + auth_server.update({"role": role}) + else: + auth_server.update({"role": "secondary"}) + + if auth_server.get("isIseEnabled"): + pxgrid_enabled = auth_policy_server.get("pxgrid_enabled") + if pxgrid_enabled: + auth_server.update({"pxgridEnabled": pxgrid_enabled}) + else: + auth_server.update({"pxgridEnabled": True}) + + use_dnac_cert_for_pxgrid = auth_policy_server.get("use_dnac_cert_for_pxgrid") + if use_dnac_cert_for_pxgrid: + auth_server.update({"useDnacCertForPxgrid": use_dnac_cert_for_pxgrid}) + else: + auth_server.update({"useDnacCertForPxgrid": False}) + + cisco_ise_dtos = auth_policy_server.get("cisco_ise_dtos") + if not cisco_ise_dtos: + self.msg = "Mandatory parameter cisco_ise_dtos " + \ + "required when server_type is 'ISE'." + self.status = "failed" + return self + + auth_server.update({"ciscoIseDtos": []}) + position_ise_creds = 0 + for ise_credential in cisco_ise_dtos: + auth_server.get("ciscoIseDtos").append({}) + user_name = ise_credential.get("user_name") + if not user_name: + self.msg = "Mandatory parameter user_name required for ISE." + self.status = "failed" + return self + + auth_server.get("ciscoIseDtos")[position_ise_creds].update({ + "userName": user_name + }) + + password = ise_credential.get("password") + if not password: + self.msg = "Mandatory paramter password required for ISE." + self.status = "failed" + return self + + if not 4 <= len(password) <= 127: + self.msg = "" + self.status = "failed" + return self + + auth_server.get("ciscoIseDtos")[position_ise_creds].update({ + "password": password + }) + + fqdn = ise_credential.get("fqdn") + if not fqdn: + self.msg = "Mandatory parameter required for ISE." + self.status = "failed" + return self + + auth_server.get("ciscoIseDtos")[position_ise_creds].update({"fqdn": fqdn}) + + ip_address = ise_credential.get("ip_address") + if not ip_address: + self.msg = "Mandatory parameter ip_address required for ISE." + self.status = "failed" + return self + + auth_server.get("ciscoIseDtos")[position_ise_creds].update({ + "ipAddress": ip_address + }) + + subscriber_name = ise_credential.get("subscriber_name") + if not subscriber_name: + self.msg = "Mandatory parameter subscriber_name required for ISE." 
+ self.status = "failed" + return self + + auth_server.get("ciscoIseDtos")[position_ise_creds].update({ + "subscriberName": subscriber_name + }) + + description = ise_credential.get("description") + if description: + auth_server.get("ciscoIseDtos")[position_ise_creds].update({ + "description": description + }) + + ssh_key = str(ise_credential.get("ssh_key")) + if ssh_key: + auth_server.get("ciscoIseDtos")[position_ise_creds].update({ + "sshkey": ssh_key + }) + + position_ise_creds += 1 + + external_cisco_ise_ip_addr_dtos = auth_policy_server \ + .get("external_cisco_ise_ip_addr_dtos") + if external_cisco_ise_ip_addr_dtos: + auth_server.update({"externalCiscoIseIpAddrDtos": []}) + position_ise_addresses = 0 + for external_cisco_ise in external_cisco_ise_ip_addr_dtos: + external_cisco_ise_ip_addresses = external_cisco_ise \ + .get("external_cisco_ise_ip_addresses") + if external_cisco_ise_ip_addresses: + auth_server.get("externalCiscoIseIpAddrDtos").append({}) + auth_server.get("externalCiscoIseIpAddrDtos")[position_ise_addresses] \ + .update({"externalCiscoIseIpAddresses": []}) + position_ise_address = 0 + for external_ip_address in external_cisco_ise_ip_addresses: + auth_server.get("externalCiscoIseIpAddrDtos")[position_ise_addresses] \ + .get("externalCiscoIseIpAddresses").append({}) + auth_server.get("externalCiscoIseIpAddrDtos")[position_ise_addresses] \ + .get("externalCiscoIseIpAddresses")[position_ise_address].update({ + "externalIpAddress": external_ip_address.get("external_ip_address") + }) + position_ise_address += 1 + ise_type = external_cisco_ise.get("ise_type") + if ise_type: + auth_server.get("externalCiscoIseIpAddrDtos")[position_ise_addresses] \ + .update({"type": ise_type}) + position_ise_addresses += 1 + + self.log("Authentication and Policy Server playbook details: {0}" + .format(auth_server), "DEBUG") + self.want.update({"authenticationPolicyServer": auth_server}) + self.msg = "Collecting the Authentication and Policy Server details from the playbook" + self.status = "success" + return self + + def get_want(self, config): + """ + Get all the Authentication Policy Server related information from playbook + + Parameters: + config (list of dict) - Playbook details + + Returns: + None + """ + + if config.get("authentication_policy_server"): + auth_policy_server = config.get("authentication_policy_server") + self.get_want_authentication_policy_server(auth_policy_server).check_return_status() + + self.log("Desired State (want): {0}".format(self.want), "INFO") + self.msg = "Successfully retrieved details from the playbook" + self.status = "success" + return self + + def accept_cisco_ise_server_certificate(self, ipAddress): + """ + Accept the Cisco ISE server certificate in Cisco Catalyst + Center provided in the playbook. + + Parameters: + ipAddress (str) - The Ip address of the Authentication and Policy Server to be deleted. + + Returns: + None + """ + + try: + AuthServer = self.auth_server_exists(ipAddress) + if not AuthServer: + self.log(str("Error while retrieving the Authentication and Policy Server {0} \ + details.".format(ipAddress)), "CRITICAL") + self.msg = "Error while retrieving the Authentication and Policy Server {0} \ + details.".format(ipAddress) + self.status = "failed" + return self + + cisco_ise_id = AuthServer.get("id") + if not cisco_ise_id: + self.log(str("Error while retrieving ht Authentication and Policy Server {0} id." + .format(ipAddress)), "CRITICAL") + self.msg = "Error while retrieving ht Authentication and Policy Server {0} id." 
\ + .format(ipAddress) + self.status = "failed" + return self + + response = self.dnac._exec( + family="system_settings", + function="accept_cisco_ise_server_certificate_for_cisco_ise_server_integration", + params={ + "id": cisco_ise_id, + "isCertAcceptedByUser": True + }, + ) + self.log("Received API response for 'accept_cisco_ise_server_certificate_" + "for_cisco_ise_server_integration': {0}".format(response), "DEBUG") + except Exception as msg: + self.log("Exception occurred while accepting the certificate of {0}: {1}" + .format(ipAddress, msg)) + return None + return + + def update_auth_policy_server(self, ipAddress): + """ + Update/Create Authentication and Policy Server in Cisco + Catalyst Center with fields provided in playbook. + + Parameters: + ipAddress (str) - The Ip address of the Authentication and Policy Server to be deleted. + + Returns: + None + """ + + result_auth_server = self.result.get("response")[0].get("authenticationPolicyServer") + result_auth_server.get("response").update({ipAddress: {}}) + + # Check Authentication and Policy Server exist, if not create and return + if not self.have.get("authenticationPolicyServer").get("exists"): + auth_server_params = self.want.get("authenticationPolicyServer") + self.log("Desired State for Authentication and Policy Server (want): {0}" + .format(auth_server_params), "DEBUG") + response = self.dnac._exec( + family="system_settings", + function="add_authentication_and_policy_server_access_configuration", + params=auth_server_params, + ) + if not self.want.get("authenticationPolicyServer").get("isIseEnabled"): + validation_string = "successfully created aaa settings" + else: + validation_string = "operation sucessful" + self.check_task_response_status(response, validation_string).check_return_status() + self.accept_cisco_ise_server_certificate(ipAddress) + self.log("Successfully created Authentication and Policy Server '{0}'." 
+ .format(ipAddress), "INFO") + result_auth_server.get("response").get(ipAddress) \ + .update({ + "authenticationPolicyServer Details": self.want + .get("authenticationPolicyServer") + }) + result_auth_server.get("msg").update({ + ipAddress: "Authentication and Policy Server Created Successfully" + }) + return + + # Authentication and Policy Server exists, check update is required + # Edit API not working, remove this + if not self.requires_update(self.have.get("authenticationPolicyServer").get("details"), + self.want.get("authenticationPolicyServer"), + self.authentication_policy_server_obj_params): + self.log("Authentication and Policy Server '{0}' doesn't require an update" + .format(ipAddress), "INFO") + result_auth_server.get("response").get(ipAddress).update({ + "Cisco Catalyst Center params": + self.have.get("authenticationPolicyServer").get("details") + }) + result_auth_server.get("response").get(ipAddress).update({ + "Id": self.have.get("authenticationPolicyServer").get("id") + }) + result_auth_server.get("msg").update({ + ipAddress: "Authentication and Policy Server doesn't require an update" + }) + return + + self.log("Authentication and Policy Server requires update", "DEBUG") + + # Authenticaiton and Policy Server Exists + auth_server_params = copy.deepcopy(self.want.get("authenticationPolicyServer")) + auth_server_params.update({"id": self.have.get("authenticationPolicyServer").get("id")}) + self.log("Desired State for Authentication and Policy Server (want): {0}" + .format(auth_server_params), "DEBUG") + self.log("Current State for Authentication and Policy Server (have): {0}" + .format(self.have.get("authenticationPolicyServer").get("details")), "DEBUG") + response = self.dnac._exec( + family="system_settings", + function="edit_authentication_and_policy_server_access_configuration", + params=auth_server_params, + ) + + self.check_execution_response_status(response).check_return_status() + self.log("Authentication and Policy Server '{0}' updated successfully" + .format(ipAddress), "INFO") + result_auth_server.get("response").get(ipAddress) \ + .update({"Id": self.have.get("authenticationPolicyServer").get("id")}) + result_auth_server.get("msg").update({ + ipAddress: "Authentication and Policy Server Updated Successfully" + }) + return + + def get_diff_merged(self, config): + """ + Update or create Authentication and Policy Server in + Cisco Catalyst Center based on the playbook details. + + Parameters: + config (list of dict) - Playbook details containing + Authentication and Policy Server information. + + Returns: + self + """ + + if config.get("authentication_policy_server") is not None: + ipAddress = config.get("authentication_policy_server").get("server_ip_address") + self.update_auth_policy_server(ipAddress) + + return self + + def delete_auth_policy_server(self, ipAddress): + """ + Delete a Authentication and Policy Server by server Ip address in Cisco Catalyst Center. + + Parameters: + ipAddress (str) - The Ip address of the Authentication and Policy Server to be deleted. + + Returns: + self + """ + + auth_server_exists = self.have.get("authenticationPolicyServer").get("exists") + result_auth_server = self.result.get("response")[0].get("authenticationPolicyServer") + if not auth_server_exists: + result_auth_server.get("response").update({ + ipAddress: "Authentication and Policy Server not found" + }) + self.msg = "Authentication and Policy Server not found." 
+ self.status = "success" + return self + + response = self.dnac._exec( + family="system_settings", + function="delete_authentication_and_policy_server_access_configuration", + params={"id": self.have.get("authenticationPolicyServer").get("id")}, + ) + + self.log("Received API response for 'delete_authentication_and_" + "policy_server_access_configuration': {0}".format(response), "DEBUG") + # Check the task status + validation_string = "successfully deleted aaa settings" + self.check_task_response_status(response, validation_string).check_return_status() + taskid = response.get("response").get("taskId") + # Update result information + result_auth_server.get("response").update({ipAddress: {}}) + result_auth_server.get("response").get(ipAddress).update({"Task Id": taskid}) + result_auth_server.get("msg").update({ + ipAddress: "Authentication and Policy Server deleted successfully." + }) + self.msg = "Authentication and Policy Server - {0} deleted successfully.".format(ipAddress) + self.status = "success" + return self + + def get_diff_deleted(self, config): + """ + Delete Authentication and Policy Server from the Cisco Catalyst Center. + + Parameters: + config (list of dict) - Playbook details + + Returns: + self + """ + + if config.get("authentication_policy_server") is not None: + ipAddress = config.get("authentication_policy_server").get("server_ip_address") + self.delete_auth_policy_server(ipAddress).check_return_status() + + return self + + def verify_diff_merged(self, config): + """ + Validating the Cisco Catalyst Center configuration with the playbook details + when state is merged (Create/Update). + + Parameters: + config (dict) - Playbook details containing + Authentication and Policy Server configuration. + + Returns: + self + """ + + self.get_have(config) + self.log("Current State (have): {0}".format(self.have), "INFO") + self.log("Requested State (want): {0}".format(self.want), "INFO") + if config.get("authentication_policy_server") is not None: + self.log("Desired State of Authentication and Policy Server (want): {0}" + .format(self.want.get("authenticationPolicyServer")), "DEBUG") + self.log("Current State of Authentication and Policy Server (have): {0}" + .format(self.have.get("authenticationPolicyServer") + .get("details")), "DEBUG") + check_list = ["isIseEnabled", "ipAddress", "pxgridEnabled", + "useDnacCertForPxgrid", "port", "protocol", + "retries", "role", "timeoutSeconds", "encryptionScheme"] + auth_server_have = self.have.get("authenticationPolicyServer").get("details") + auth_server_want = self.want.get("authenticationPolicyServer") + for item in check_list: + if auth_server_have.get(item) and auth_server_want.get(item) and \ + auth_server_have.get(item) != auth_server_want.get(item): + self.msg = "Authentication and Policy Server " + \ + "Config is not applied to the Cisco Catalyst Center." + self.status = "failed" + return self + + self.log("Successfully validated Authentication and Policy Server '{0}'." + .format(self.want.get("authenticationPolicyServer").get("ipAddress")), "INFO") + self.result.get("response")[0].get("authenticationPolicyServer").update({ + "Validation": "Success" + }) + + self.msg = "Successfully validated the Authentication and Policy Server." + self.status = "success" + return self + + def verify_diff_deleted(self, config): + """ + Validating the Cisco Catalyst Center configuration with the playbook details + when state is deleted (delete). 
+ + Parameters: + config (dict) - Playbook details containing + Authentication and Policy Server configuration. + + Returns: + self + """ + + self.get_have(config) + ipAddress = config.get("authentication_policy_server").get("server_ip_address") + self.log("Current State (have): {0}".format(self.have), "INFO") + self.log("Authentication and Policy Server deleted from the Cisco Catalyst Center: {0}" + .format(ipAddress), "INFO") + if config.get("authentication_policy_server") is not None: + auth_server_exists = self.have.get("authenticationPolicyServer").get("exists") + if auth_server_exists: + self.msg = "Authentication and Policy Server " + \ + "Config is not applied to the Cisco Catalyst Center." + self.status = "failed" + return self + + self.log("Successfully validated absence of Authentication and Policy Server '{0}'." + .format(config.get("authentication_policy_server").get("ip_address")), "INFO") + self.result.get("response")[0].get("authenticationPolicyServer").update({ + "Validation": "Success" + }) + + self.msg = "Successfully validated the absence of Authentication and Policy Server." + self.status = "success" + return self + + def reset_values(self): + """ + Reset all neccessary attributes to default values + + Parameters: + None + + Returns: + None + """ + + self.have.clear() + self.want.clear() + return + + +def main(): + """main entry point for module execution""" + + # Define the specification for module arguments + element_spec = { + "dnac_host": {"type": 'str', "required": True}, + "dnac_port": {"type": 'str', "default": '443'}, + "dnac_username": {"type": 'str', "default": 'admin', "aliases": ['user']}, + "dnac_password": {"type": 'str', "no_log": True}, + "dnac_verify": {"type": 'bool', "default": 'True'}, + "dnac_version": {"type": 'str', "default": '2.2.3.3'}, + "dnac_debug": {"type": 'bool', "default": False}, + "dnac_log": {"type": 'bool', "default": False}, + "dnac_log_level": {"type": 'str', "default": 'WARNING'}, + "dnac_log_file_path": {"type": 'str', "default": 'dnac.log'}, + "dnac_log_append": {"type": 'bool', "default": True}, + "config_verify": {"type": 'bool', "default": False}, + "config": {"type": 'list', "required": True, "elements": 'dict'}, + "state": {"default": 'merged', "choices": ['merged', 'deleted']}, + "validate_response_schema": {"type": 'bool', "default": True}, + } + + # Create an AnsibleModule object with argument specifications + module = AnsibleModule(argument_spec=element_spec, supports_check_mode=False) + ccc_ise_radius = IseRadiusIntegration(module) + state = ccc_ise_radius.params.get("state") + config_verify = ccc_ise_radius.params.get("config_verify") + if state not in ccc_ise_radius.supported_states: + ccc_ise_radius.status = "invalid" + ccc_ise_radius.msg = "State {0} is invalid".format(state) + ccc_ise_radius.check_return_status() + + ccc_ise_radius.validate_input().check_return_status() + + for config in ccc_ise_radius.config: + ccc_ise_radius.reset_values() + ccc_ise_radius.get_have(config).check_return_status() + if state != "deleted": + ccc_ise_radius.get_want(config).check_return_status() + ccc_ise_radius.get_diff_state_apply[state](config).check_return_status() + if config_verify: + ccc_ise_radius.verify_diff_state_apply[state](config).check_return_status() + + module.exit_json(**ccc_ise_radius.result) + + +if __name__ == "__main__": + main() diff --git a/plugins/modules/network_settings_intent.py b/plugins/modules/network_settings_intent.py index 88134e23c1..72c2e8421b 100644 --- a/plugins/modules/network_settings_intent.py 
+++ b/plugins/modules/network_settings_intent.py @@ -1064,7 +1064,7 @@ def reserve_pool_exists(self, name, site_name): family="network_settings", function="get_reserve_ip_subpool", op_modifies=True, - params={"site_id": site_id} + params={"siteId": site_id} ) if not isinstance(response, dict): reserve_pool.update({"success": False}) @@ -2200,6 +2200,8 @@ def main(): "dnac_log_file_path": {"type": 'str', "default": 'dnac.log'}, "dnac_log_append": {"type": 'bool', "default": True}, "config_verify": {"type": 'bool', "default": False}, + "dnac_api_task_timeout": {"type": 'int', "default": 1200}, + "dnac_task_poll_interval": {"type": 'int', "default": 2}, "config": {"type": 'list', "required": True, "elements": 'dict'}, "state": {"default": 'merged', "choices": ['merged', 'deleted']}, "validate_response_schema": {"type": 'bool', "default": True}, diff --git a/plugins/modules/network_settings_workflow_manager.py b/plugins/modules/network_settings_workflow_manager.py index 524d236c5b..71fb089428 100644 --- a/plugins/modules/network_settings_workflow_manager.py +++ b/plugins/modules/network_settings_workflow_manager.py @@ -1060,7 +1060,7 @@ def reserve_pool_exists(self, name, site_name): family="network_settings", function="get_reserve_ip_subpool", op_modifies=True, - params={"site_id": site_id} + params={"siteId": site_id} ) if not isinstance(response, dict): reserve_pool.update({"success": False}) @@ -2185,6 +2185,8 @@ def main(): "dnac_log_file_path": {"type": 'str', "default": 'dnac.log'}, "dnac_log_append": {"type": 'bool', "default": True}, "config_verify": {"type": 'bool', "default": False}, + "dnac_api_task_timeout": {"type": 'int', "default": 1200}, + "dnac_task_poll_interval": {"type": 'int', "default": 2}, "config": {"type": 'list', "required": True, "elements": 'dict'}, "state": {"default": 'merged', "choices": ['merged', 'deleted']}, "validate_response_schema": {"type": 'bool', "default": True}, diff --git a/plugins/modules/template_intent.py b/plugins/modules/template_intent.py index 31243cfdce..19cdc455f0 100644 --- a/plugins/modules/template_intent.py +++ b/plugins/modules/template_intent.py @@ -2775,6 +2775,8 @@ def main(): "dnac_log_append": {"type": 'bool', "default": True}, 'validate_response_schema': {'type': 'bool', 'default': True}, "config_verify": {"type": 'bool', "default": False}, + 'dnac_api_task_timeout': {'type': 'int', "default": 1200}, + 'dnac_task_poll_interval': {'type': 'int', "default": 2}, 'config': {'required': True, 'type': 'list', 'elements': 'dict'}, 'state': {'default': 'merged', 'choices': ['merged', 'deleted']} } From 80cf396ba487c5e1becce908c1761675b9d47e8f Mon Sep 17 00:00:00 2001 From: MUTHU-RAKESH-27 <19cs127@psgitech.ac.in> Date: Wed, 27 Mar 2024 12:46:15 +0530 Subject: [PATCH 10/18] Removed the ise_radius_integration_workflow_manager.py --- ...ise_radius_integration_workflow_manager.py | 1340 ----------------- 1 file changed, 1340 deletions(-) delete mode 100644 plugins/modules/ise_radius_integration_workflow_manager.py diff --git a/plugins/modules/ise_radius_integration_workflow_manager.py b/plugins/modules/ise_radius_integration_workflow_manager.py deleted file mode 100644 index df33461927..0000000000 --- a/plugins/modules/ise_radius_integration_workflow_manager.py +++ /dev/null @@ -1,1340 +0,0 @@ -#!/usr/bin/python -# -*- coding: utf-8 -*- -# Copyright (c) 2024, Cisco Systems -# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) - -"""Ansible module to operate the Authentication and Policy Servers in Cisco 
Catalyst Center.""" -from __future__ import absolute_import, division, print_function - -__metaclass__ = type -__author__ = ['Muthu Rakesh, Madhan Sankaranarayanan'] - -DOCUMENTATION = r""" ---- -module: ise_radius_integration_workflow_manager -short_description: Resource module for Authentication and Policy Servers -description: -- Manage operations on Authentication and Policy Servers. -- API to create Authentication and Policy Server Access Configuration. -- API to update Authentication and Policy Server Access Configuration. -- API to delete Authentication and Policy Server Access Configuration. -version_added: '6.13.0' -extends_documentation_fragment: - - cisco.dnac.workflow_manager_params -author: Muthu Rakesh (@MUTHU-RAKESH-27) - Madhan Sankaranarayanan (@madhansansel) -options: - config_verify: - description: Set to True to verify the Cisco Catalyst Center after applying the playbook config. - type: bool - default: False - state: - description: The state of Cisco Catalyst Center after module completion. - type: str - choices: [ merged, deleted ] - default: merged - config: - description: - - List of details of Authentication and Policy Servers being managed. - type: list - elements: dict - required: true - suboptions: - authentication_policy_server: - description: Manages the Authentication and Policy Servers. - type: dict - suboptions: - server_type: - description: - - Type of the Authentication and Policy Server. - - ISE for Cisco ISE servers. - - AAA for Non-Cisco ISE servers. - type: str - choices: [ AAA, ISE ] - default: AAA - server_ip_address: - description: Ip Address of the Authentication and Policy Server. - type: str - required: True - shared_secret: - description: - - Shared secret between devices and authentication and policy server. - - Shared secret key length should be from 4 to 10. - type: str - protocol: - description: - - Type of protocol for authentication and policy server. - - RADIUS provides centralized services (AAA) for users in remote access scenarios. - - TACACS focuses on access control and administrative authentication for network devices. - type: str - choices: [ TACACS, RADIUS, RADIUS_TACACS] - default: TACACS - encryption_scheme: - description: - - Type of encryption scheme for additional security. - - > - KEYWRAP is used for securely wrapping and unwrapping encryption keys, - ensuring their confidentiality during transmission or storage. - - > - RADSEC is an extension of RADIUS that provides secure communication - between RADIUS clients and servers over TLS/SSL. Enhances enhancing the - confidentiality and integrity of authentication and accounting data exchange. - type: str - choices: [KEYWRAP, RADSEC] - message_key: - description: - - Message key used to encrypt shared secret. - - Required when encryption_scheme is provided. - - > - When ASCII format is selected, Message Authentication Code Key may contain - alphanumeric and special characters. Key must be 20 char long. - type: str - encryption_key: - description: - - Encryption key used to encrypt shared secret. - - Required when encryption_scheme is provided. - - > - When ASCII format is selected, Encryption Key may contain - alphanumeric and special characters. Key must be 16 char long. - type: str - authentication_port: - description: - - Authentication port of RADIUS server. - - Authentication port should be from 1 to 65535. - type: str - default: "1812" - accounting_port: - description: - - Accounting port of RADIUS server. - - Accounting port should be from 1 to 65535. 
- type: str - default: "1813" - port: - description: - - Port of TACACS server. - - Port should be from 1 to 65535. - type: str - default: "49" - retries: - description: - - Number of communication retries between devices and authentication and policy server. - - Retries should be from 1 to 3. - type: str - default: "3" - timeout: - description: - - Number of seconds before timing out between devices and authentication and policy server. - - Timeout should be from 2 to 20. - type: str - default: "4" - role: - description: Role of authentication and policy server. - type: str - default: secondary - pxgrid_enabled: - description: - - Set True to enable the Pxgrid and False to disable the Pxgrid. - - Pxgrid is available only for the Cisco ISE Servers. - - > - PxGrid facilitates seamless integration and information sharing across products, - enhancing threat detection and response capabilities within the network ecosystem. - type: bool - default: True - use_dnac_cert_for_pxgrid: - description: Set True to use the Cisco Catalyst Center certificate for the Pxgrid. - type: bool - default: False - cisco_ise_dtos: - description: - - List of Cisco ISE Data Transfer Objects (DTOs). - - Required when server_type is set to ISE. - type: list - elements: dict - suboptions: - user_name: - description: - - User name of the Cisco ISE server. - - Required for passing the cisco_ise_dtos. - type: str - password: - description: - - Password of the Cisco ISE server. - - Required for passing the cisco_ise_dtos. - type: str - fqdn: - description: - - Fully-qualified domain name of the Cisco ISE server. - - Required for passing the cisco_ise_dtos. - type: str - ip_address: - description: - - IP Address of the Cisco ISE Server. - - Required for passing the cisco_ise_dtos. - type: str - subscriber_name: - description: - - Subscriber name of the Cisco ISE server. - - Required for passing the cisco_ise_dtos. - type: str - description: - description: Description about the Cisco ISE server. - type: str - ssh_key: - description: SSH key of the Cisco ISE server. - type: str - external_cisco_ise_ip_addr_dtos: - description: External Cisco ISE Ip address data transfer objects for future use. - type: list - elements: dict - suboptions: - external_cisco_ise_ip_addresses: - description: External Cisco ISE Ip addresses. - type: list - elements: dict - suboptions: - external_ip_address: - description: External Cisco ISE Ip address. - type: str - ise_type: - description: Type of the Authentication and Policy Server. - type: str -requirements: -- dnacentersdk == 2.7.0 -- python >= 3.5 -notes: - - SDK Method used are - system_settings.SystemSettings.add_authentication_and_policy_server_access_configuration, - system_settings.SystemSettings.edit_authentication_and_policy_server_access_configuration, - system_settings.SystemSettings.accept_cisco_ise_server_certificate_for_cisco_ise_server_integration, - system_settings.SystemSettings.delete_authentication_and_policy_server_access_configuration, - - - Paths used are - post /dna/intent/api/v1/authentication-policy-servers, - put /dna/intent/api/v1/authentication-policy-servers/${id}, - put /dna/intent/api/v1/integrate-ise/${id}, - delete /dna/intent/api/v1/authentication-policy-servers/${id} - -""" - -EXAMPLES = r""" -- name: Create an AAA server. 
- cisco.dnac.ise_radius_integration_workflow_manager: - dnac_host: "{{dnac_host}}" - dnac_username: "{{dnac_username}}" - dnac_password: "{{dnac_password}}" - dnac_verify: "{{dnac_verify}}" - dnac_port: "{{dnac_port}}" - dnac_version: "{{dnac_version}}" - dnac_debug: "{{dnac_debug}}" - dnac_log: True - dnac_log_level: "{{ dnac_log_level }}" - state: merged - config_verify: True - config: - - authentication_policy_server: - server_type: string - server_ip_address: string - shared_secret: string - protocol: string - encryption_scheme: string - message_key: string - encryption_key: string - authentication_port: string - accounting_port: string - port: string - retries: string - timeout: string - role: string - -- name: Create an Cisco ISE server. - cisco.dnac.ise_radius_integration_workflow_manager: - dnac_host: "{{dnac_host}}" - dnac_username: "{{dnac_username}}" - dnac_password: "{{dnac_password}}" - dnac_verify: "{{dnac_verify}}" - dnac_port: "{{dnac_port}}" - dnac_version: "{{dnac_version}}" - dnac_debug: "{{dnac_debug}}" - dnac_log: True - dnac_log_level: "{{ dnac_log_level }}" - state: merged - config_verify: True - config: - - authentication_policy_server: - server_type: string - server_ip_address: string - shared_secret: string - protocol: string - encryption_scheme: string - message_key: string - encryption_key: string - authentication_port: string - accounting_port: string - port: string - retries: string - timeout: string - role: string - use_dnac_cert_for_pxgrid: False - pxgrid_enabled: True - cisco_ise_dtos: - - user_name: string - password: string - fqdn: string - ip_address: string - subscriber_name: string - description: string - -- name: Delete an Authentication and Policy server. - cisco.dnac.ise_radius_integration_workflow_manager: - dnac_host: "{{dnac_host}}" - dnac_username: "{{dnac_username}}" - dnac_password: "{{dnac_password}}" - dnac_verify: "{{dnac_verify}}" - dnac_port: "{{dnac_port}}" - dnac_version: "{{dnac_version}}" - dnac_debug: "{{dnac_debug}}" - dnac_log: True - dnac_log_level: "{{ dnac_log_level }}" - state: merged - config_verify: True - config: - - authentication_policy_server: - server_ip_address: string -""" - -RETURN = r""" -# Case_1: Successful creation of Authentication and Policy Server. -response_1: - description: A dictionary or list with the response returned by the Cisco Catalyst Center Python SDK - returned: always - type: dict - sample: > - { - "response": { - "taskId": "string", - "url": "string" - }, - "version": "string" - } - -# Case_2: Successful updation of Authentication and Policy Server. 
-response_2: - description: A dictionary or list with the response returned by the Cisco Catalyst Center Python SDK - returned: always - type: dict - sample: > - { - "response": { - "taskId": "string", - "url": "string" - }, - "version": "string" - } - -# Case_3: Successful creation/updation of network -response_3: - description: A dictionary or list with the response returned by the Cisco Catalyst Center Python SDK - returned: always - type: dict - sample: > - { - "response": { - "taskId": "string", - "url": "string" - }, - "version": "string" - } -""" - -import copy -from ansible.module_utils.basic import AnsibleModule -from ansible_collections.cisco.dnac.plugins.module_utils.dnac import ( - DnacBase, - validate_list_of_dicts, - get_dict_result, - dnac_compare_equality, -) - - -class IseRadiusIntegration(DnacBase): - """Class containing member attributes for ise_radius_integration_workflow_manager module""" - - def __init__(self, module): - super().__init__(module) - self.result["response"] = [ - {"authenticationPolicyServer": {"response": {}, "msg": {}}} - ] - self.authentication_policy_server_obj_params = \ - self.get_obj_params("authenticationPolicyServer") - - def validate_input(self): - """ - Checks if the configuration parameters provided in the playbook - meet the expected structure and data types, - as defined in the 'temp_spec' dictionary. - - Parameters: - None - - Returns: - self - - """ - - if not self.config: - self.msg = "config not available in playbook for validation" - self.status = "success" - return self - - # temp_spec is the specification for the expected structure of configuration parameters - temp_spec = { - "authentication_policy_server": { - "type": "dict", - "server_type": {"type": 'string', "choices": ["AAA", "ISE"]}, - "server_ip_address": {"type": 'string'}, - "shared_secret": {"type": 'string'}, - "protocol": {"type": 'string', "choices": ["TACACS", "RADIUS", "RADIUS_TACACS"]}, - "encryption_scheme": {"type": 'string'}, - "message_key": {"type": 'string'}, - "encryption_key": {"type": 'string'}, - "authentication_port": {"type": 'string'}, - "accounting_port": {"type": 'string'}, - "port": {"type": 'string'}, - "retries": {"type": 'string'}, - "timeout": {"type": 'string'}, - "role": {"type": 'string'}, - "pxgrid_enabled": {"type": 'bool'}, - "use_dnac_cert_for_pxgrid": {"type": 'bool'}, - "cisco_ise_dtos": { - "type": 'list', - "user_name": {"type": 'string'}, - "password": {"type": 'string'}, - "fqdn": {"type": 'string'}, - "ip_address": {"type": 'string'}, - "subscriber_name": {"type": 'string'}, - "description": {"type": 'string'}, - "ssh_key": {"type": 'string'}, - }, - "external_cisco_ise_ip_addr_dtos": { - "type": 'list', - "external_cisco_ise_ip_addresses": { - "type": 'list', - "external_ip_address": {"type": 'string'}, - }, - "ise_type": {"type": 'string'}, - } - } - } - - # Validate playbook params against the specification (temp_spec) - valid_temp, invalid_params = validate_list_of_dicts(self.config, temp_spec) - if invalid_params: - self.msg = "Invalid parameters in playbook: {0}".format("\n".join(invalid_params)) - self.status = "failed" - return self - - self.validated_config = valid_temp - self.log("Successfully validated playbook config params: {0}".format(valid_temp), "INFO") - self.msg = "Successfully validated input from the playbook" - self.status = "success" - return self - - def requires_update(self, have, want, obj_params): - """ - Check if the template config given requires update by comparing - current information wih the requested 
information. - - This method compares the current global pool, reserve pool, - or network details from Cisco Catalyst Center with the user-provided details - from the playbook, using a specified schema for comparison. - - Parameters: - have (dict) - Current information from the Cisco Catalyst Center - (global pool, reserve pool, network details) - want (dict) - Users provided information from the playbook - obj_params (list of tuples) - A list of parameter mappings specifying which - Cisco Catalyst Center parameters (dnac_param) - correspond to the user-provided - parameters (ansible_param). - - Returns: - bool - True if any parameter specified in obj_params differs between - current_obj and requested_obj, indicating that an update is required. - False if all specified parameters are equal. - - """ - - current_obj = have - requested_obj = want - self.log("Current State (have): {0}".format(current_obj), "DEBUG") - self.log("Desired State (want): {0}".format(requested_obj), "DEBUG") - - return any(not dnac_compare_equality(current_obj.get(dnac_param), - requested_obj.get(ansible_param)) - for (dnac_param, ansible_param) in obj_params) - - def get_obj_params(self, get_object): - """ - Get the required comparison obj_params value - - Parameters: - get_object (str) - identifier for the required obj_params - - Returns: - obj_params (list) - obj_params value for comparison. - """ - - try: - obj_params = [] - if get_object == "authenticationPolicyServer": - obj_params = [ - ("authenticationPort", "authenticationPort"), - ("accountingPort", "accountingPort"), - ("ciscoIseDtos", "ciscoIseDtos"), - ("ipAddress", "ipAddress"), - ("pxgridEnabled", "pxgridEnabled"), - ("useDnacCertForPxgrid", "useDnacCertForPxgrid"), - ("isIseEnabled", "isIseEnabled"), - ("port", "port"), - ("protocol", "protocol"), - ("retries", "retries"), - ("role", "role"), - ("sharedSecret", "sharedSecret"), - ("timeoutSeconds", "timeoutSeconds"), - ("encryptionScheme", "encryptionScheme"), - ("messageKey", "messageKey"), - ("encryptionKey", "encryptionKey"), - ("externalCiscoIseIpAddrDtos", "externalCiscoIseIpAddrDtos") - ] - else: - raise ValueError("Received an unexpected value for 'get_object': {0}" - .format(get_object)) - except Exception as msg: - self.log("Received exception: {0}".format(msg), "CRITICAL") - - return obj_params - - def get_auth_server_params(self, auth_server_info): - """ - Process Authentication and Policy Server params from playbook data for - Authentication and Policy Server config in Cisco Catalyst Center. - - Parameters: - auth_server_info (dict) - Cisco Catalyst Center data containing - information about the Authentication and Policy Server. - - Returns: - dict or None - Processed Authentication and Policy Server data in a format suitable - for Cisco Catalyst Center configuration, or None if auth_server_info is empty. 
- """ - - if not auth_server_info: - self.log("Authentication and Policy Server data is empty", "INFO") - return None - - self.log("Authentication and Policy Server Details: {0}".format(auth_server_info), "DEBUG") - auth_server = { - "authenticationPort": auth_server_info.get("authenticationPort"), - "accountingPort": auth_server_info.get("accountingPort"), - "isIseEnabled": auth_server_info.get("iseEnabled"), - "ipAddress": auth_server_info.get("ipAddress"), - "pxgridEnabled": auth_server_info.get("pxgridEnabled"), - "useDnacCertForPxgrid": auth_server_info.get("useDnacCertForPxgrid"), - "port": auth_server_info.get("port"), - "protocol": auth_server_info.get("protocol"), - "retries": str(auth_server_info.get("retries")), - "role": auth_server_info.get("role"), - "timeoutSeconds": str(auth_server_info.get("timeoutSeconds")), - "encryptionScheme": auth_server_info.get("encryptionScheme") - } - self.log("Formated Authentication and Policy Server details: {0}" - .format(auth_server), "DEBUG") - if auth_server.get("isIseEnabled") is True: - auth_server_ise_info = auth_server_info.get("ciscoIseDtos") - auth_server.update({"ciscoIseDtos": []}) - for ise_credential in auth_server_ise_info: - auth_server.get("ciscoIseDtos").append({ - "userName": ise_credential.get("userName"), - "fqdn": ise_credential.get("fqdn"), - "ipAddress": ise_credential.get("ipAddress"), - "subscriberName": ise_credential.get("subscriberName"), - "description": ise_credential.get("description") - }) - - return auth_server - - def auth_server_exists(self, ipAddress): - """ - Check if the Authentication and Policy Server with the given ipAddress exists - - Parameters: - ipAddress (str) - The ipAddress of the Authentication and - Policy Server to check for existence. - - Returns: - dict - A dictionary containing information about the - Authentication and Policy Server's existence: - - 'exists' (bool): True if the Authentication and Policy Server exists, False otherwise. - - 'id' (str or None): The ID of the Authentication and Policy Server if it exists - or None if it doesn't. - - 'details' (dict or None): Details of the Authentication and Policy Server if it exists - else None. - """ - - AuthServer = { - "exists": False, - "details": None, - "id": None - } - response = self.dnac._exec( - family="system_settings", - function='get_authentication_and_policy_servers', - ) - if not isinstance(response, dict): - self.log("Failed to retrieve the Authentication and Policy Server details - " - "Response is not a dictionary", "CRITICAL") - return AuthServer - - all_auth_server_details = response.get("response") - auth_server_details = get_dict_result(all_auth_server_details, "ipAddress", ipAddress) - self.log("Authentication and Policy Server Ip Address: {0}" - .format(ipAddress), "DEBUG") - self.log("Authentication and Policy Server details: {0}" - .format(auth_server_details), "DEBUG") - if not auth_server_details: - self.log("Global pool {0} does not exist".format(ipAddress), "INFO") - return AuthServer - AuthServer.update({"exists": True}) - AuthServer.update({"id": auth_server_details.get("instanceUuid")}) - AuthServer["details"] = self.get_auth_server_params(auth_server_details) - - self.log("Formatted global pool details: {0}".format(AuthServer), "DEBUG") - return AuthServer - - def get_have_authentication_policy_server(self, config): - """ - Get the current Authentication and Policy Server information from - Cisco Catalyst Center based on the provided playbook details. - check this API using check_return_status. 
- - Parameters: - config (dict) - Playbook details containing - Authentication and Policy Server configuration. - - Returns: - self - The current object with updated - Authentication and Policy Server information. - """ - - AuthServer = { - "exists": False, - "details": None, - "id": None - } - authentication_policy_server = config.get("authentication_policy_server") - if authentication_policy_server is None: - self.msg = "authentication_policy_server in config is missing in the playbook" - self.status = "failed" - return self - - ipAddress = authentication_policy_server.get("server_ip_address") - if ipAddress is None: - self.msg = "Mandatory Parameter server_ip_address required" - self.status = "failed" - return self - - AuthServer = self.auth_server_exists(ipAddress) - self.log("Authentication and Policy Server exists: {0}" - .format(AuthServer.get("exists")), "DEBUG") - self.log("Authentication and Policy Server details: {0}" - .format(AuthServer.get("details")), "DEBUG") - self.log("Authentication and Policy Server Id: {0}" - .format(AuthServer.get("id")), "DEBUG") - self.have.update({"authenticationPolicyServer": AuthServer}) - self.msg = "Collecting the Authentication and Policy Server " + \ - "details from the Cisco Catalyst Center." - self.status = "success" - return self - - def get_have(self, config): - """ - Get the current Authentication and Policy Server details from Cisco Catalyst Center - - Parameters: - config (dict) - Playbook details containing - Authentication and Policy Server configuration. - - Returns: - self - The current object with updated - Authentication and Policy Server information. - """ - - if config.get("authentication_policy_server") is not None: - self.get_have_authentication_policy_server(config).check_return_status() - - self.log("Current State (have): {0}".format(self.have), "INFO") - self.msg = "Successfully retrieved the details from the Cisco Catalyst Center" - self.status = "success" - return self - - def get_want_authentication_policy_server(self, auth_policy_server): - """ - Get all the Authentication Policy Server information from playbook - Set the status and the msg before returning from the API - Check the return value of the API with check_return_status() - - Parameters: - auth_policy_server (dict) - Playbook authentication policy server details - containing IpAddress, authentication port, accounting port, Cisco ISE Details, - protocol, port, retries, role, timeout seconds, encryption details. - - Returns: - self - The current object with updated desired Authentication Policy Server information. - """ - - auth_server = {} - auth_server_exists = self.have.get("authenticationPolicyServer").get("exists") - server_type = auth_policy_server.get("server_type") - if server_type not in ["ISE", "AAA", None]: - self.msg = "server_type should either be ISE or AAA but not {0}.".format(server_type) - self.status = "failed" - return self - - if server_type == "ISE": - auth_server.update({"isIseEnabled": True}) - else: - auth_server.update({"isIseEnabled": False}) - - auth_server.update({"ipAddress": auth_policy_server.get("server_ip_address")}) - - shared_secret = auth_policy_server.get("shared_secret") - if not (shared_secret or auth_server_exists): - self.msg = "shared_secret is mandatory parameter" - self.status = "failed" - return self - - if not (4 <= len(shared_secret) <= 10) or shared_secret.isspace(): - self.msg = "shared_secret should character should be between 4 to 100." 
- self.status = "failed" - return self - - auth_server.update({"sharedSecret": shared_secret}) - - protocol = auth_policy_server.get("protocol") - if protocol not in ["RADIUS", "TACACS", "RADIUS_TACACS", None]: - self.msg = "protocol should either be ['RADIUS', 'TACACS', 'RADIUS_TACACS']." + \ - "It should not be {0}".format(protocol) - self.status = "failed" - return self - - if protocol is not None: - auth_server.update({"protocol": protocol}) - else: - auth_server.update({"protocol": "RADIUS"}) - - encryption_scheme = str(auth_policy_server.get("encryption_scheme")) - if encryption_scheme not in ["KEYWRAP", "RADSEC", None]: - self.msg = "encryption_scheme should be in ['KEYWRAP', 'RADSEC']. " + \ - "It should not be {0}.".format(encryption_scheme) - self.status = "failed" - return self - - if encryption_scheme: - auth_server.update({"encryptionScheme": encryption_scheme}) - - if encryption_scheme == "KEYWRAP": - message_key = str(auth_policy_server.get("message_key")) - if not message_key: - self.msg = "message_key should not be empty if encryption_scheme is 'KEYWRAP'." - self.status = "failed" - return self - - if len(message_key) != 20: - self.msg = "message_key should be exactly 20 character." - self.status = "failed" - return self - - auth_server.update({"messageKey": message_key}) - - encryption_key = auth_policy_server.get("encryption_key") - if not encryption_key: - self.msg = "encryption_key should not be empty if encryption_scheme is 'KEYWRAP'." - self.status = "failed" - return self - - if len(encryption_key) != 16: - self.msg = "encryption_key should be exactly 16 characters." - self.status = "failed" - return self - - auth_server.update({"encryptionKey": encryption_key}) - - authentication_port = int(auth_policy_server.get("authentication_port")) - if not 1 <= int(authentication_port) <= 65535: - self.msg = "authentication_port should be from 1 to 65535." - self.status = "failed" - return self - - if authentication_port: - auth_server.update({"authenticationPort": authentication_port}) - else: - auth_server.update({"authenticationPort": "1812"}) - - accounting_port = int(auth_policy_server.get("accounting_port")) - if not 1 <= int(accounting_port) <= 65535: - self.msg = "accounting_port should be from 1 to 65535." - self.status = "failed" - return self - - if accounting_port: - auth_server.update({"accountingPort": accounting_port}) - else: - auth_server.update({"accountingPort": "1813"}) - - port = int(auth_policy_server.get("port")) - if port: - auth_server.update({"port": port}) - else: - auth_server.update({"port": "49"}) - - retries = str(auth_policy_server.get("retries")) - if not retries.isdigit(): - self.msg = "retries should contain only from 0-9." - self.status = "failed" - return self - - if not 1 <= int(retries) <= 3: - self.msg = "retries should be from 1 to 3." - self.status = "failed" - return self - - if retries: - auth_server.update({"retries": retries}) - else: - auth_server.update({"retries": "3"}) - - timeout = str(auth_policy_server.get("timeout")) - if not timeout.isdigit(): - self.msg = "timeout should contain only from 0-9." - self.status = "failed" - return self - - if not 2 <= int(timeout) <= 20: - self.msg = "timeout should be from 2 to 20." 
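# The port/retries/timeout handling in this method repeats one pattern: validate an
# optional integer against a range, otherwise fall back to a default. A hedged sketch
# of how that could be factored; the bounds and defaults mirror the checks used here,
# while the helper itself is illustrative and not part of the module.
def bounded_int(value, low, high, default, name):
    """Return 'value' as an int within [low, high], or 'default' when value is None."""
    if value is None:
        return default
    number = int(value)  # raises ValueError for non-numeric input
    if not low <= number <= high:
        raise ValueError("{0} should be from {1} to {2}, got {3}".format(name, low, high, number))
    return number

print(bounded_int(None, 1, 65535, 1812, "authentication_port"))  # -> 1812 (default)
print(bounded_int(1813, 1, 65535, 1813, "accounting_port"))      # -> 1813
print(bounded_int("2", 1, 3, 3, "retries"))                      # -> 2
print(bounded_int(5, 2, 20, 4, "timeout"))                       # -> 5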
-             self.status = "failed"
-             return self
- 
-         if timeout:
-             auth_server.update({"timeoutSeconds": timeout})
-         else:
-             auth_server.update({"timeoutSeconds": "4"})
- 
-         role = auth_policy_server.get("role")
-         if role:
-             auth_server.update({"role": role})
-         else:
-             auth_server.update({"role": "secondary"})
- 
-         if auth_server.get("isIseEnabled"):
-             pxgrid_enabled = auth_policy_server.get("pxgrid_enabled")
-             if pxgrid_enabled:
-                 auth_server.update({"pxgridEnabled": pxgrid_enabled})
-             else:
-                 auth_server.update({"pxgridEnabled": True})
- 
-             use_dnac_cert_for_pxgrid = auth_policy_server.get("use_dnac_cert_for_pxgrid")
-             if use_dnac_cert_for_pxgrid:
-                 auth_server.update({"useDnacCertForPxgrid": use_dnac_cert_for_pxgrid})
-             else:
-                 auth_server.update({"useDnacCertForPxgrid": False})
- 
-             cisco_ise_dtos = auth_policy_server.get("cisco_ise_dtos")
-             if not cisco_ise_dtos:
-                 self.msg = "Mandatory parameter cisco_ise_dtos " + \
-                            "required when server_type is 'ISE'."
-                 self.status = "failed"
-                 return self
- 
-             auth_server.update({"ciscoIseDtos": []})
-             position_ise_creds = 0
-             for ise_credential in cisco_ise_dtos:
-                 auth_server.get("ciscoIseDtos").append({})
-                 user_name = ise_credential.get("user_name")
-                 if not user_name:
-                     self.msg = "Mandatory parameter user_name required for ISE."
-                     self.status = "failed"
-                     return self
- 
-                 auth_server.get("ciscoIseDtos")[position_ise_creds].update({
-                     "userName": user_name
-                 })
- 
-                 password = ise_credential.get("password")
-                 if not password:
-                     self.msg = "Mandatory parameter password required for ISE."
-                     self.status = "failed"
-                     return self
- 
-                 if not 4 <= len(password) <= 127:
-                     self.msg = "password length should be from 4 to 127 characters."
-                     self.status = "failed"
-                     return self
- 
-                 auth_server.get("ciscoIseDtos")[position_ise_creds].update({
-                     "password": password
-                 })
- 
-                 fqdn = ise_credential.get("fqdn")
-                 if not fqdn:
-                     self.msg = "Mandatory parameter fqdn required for ISE."
-                     self.status = "failed"
-                     return self
- 
-                 auth_server.get("ciscoIseDtos")[position_ise_creds].update({"fqdn": fqdn})
- 
-                 ip_address = ise_credential.get("ip_address")
-                 if not ip_address:
-                     self.msg = "Mandatory parameter ip_address required for ISE."
-                     self.status = "failed"
-                     return self
- 
-                 auth_server.get("ciscoIseDtos")[position_ise_creds].update({
-                     "ipAddress": ip_address
-                 })
- 
-                 subscriber_name = ise_credential.get("subscriber_name")
-                 if not subscriber_name:
-                     self.msg = "Mandatory parameter subscriber_name required for ISE."
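# The per-credential loop above copies each playbook key into its camelCase API
# counterpart one assignment at a time. A compact sketch of the same mapping, shown
# only to illustrate the payload shape; the key names are the ones this loop handles
# and the helper is illustrative rather than part of the module.
ISE_KEY_MAP = {
    "user_name": "userName",
    "password": "password",
    "fqdn": "fqdn",
    "ip_address": "ipAddress",
    "subscriber_name": "subscriberName",
    "description": "description",
    "ssh_key": "sshkey",
}

def to_cisco_ise_dto(playbook_credential):
    """Translate one playbook cisco_ise_dtos entry into the API's camelCase keys."""
    return {api_key: playbook_credential[playbook_key]
            for playbook_key, api_key in ISE_KEY_MAP.items()
            if playbook_credential.get(playbook_key) is not None}

dto = to_cisco_ise_dto({"user_name": "admin", "password": "secret123",
                        "fqdn": "ise.example.com", "ip_address": "10.0.0.30",
                        "subscriber_name": "catalyst-center"})
print(dto["userName"], dto["ipAddress"])  # -> admin 10.0.0.30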
- self.status = "failed" - return self - - auth_server.get("ciscoIseDtos")[position_ise_creds].update({ - "subscriberName": subscriber_name - }) - - description = ise_credential.get("description") - if description: - auth_server.get("ciscoIseDtos")[position_ise_creds].update({ - "description": description - }) - - ssh_key = str(ise_credential.get("ssh_key")) - if ssh_key: - auth_server.get("ciscoIseDtos")[position_ise_creds].update({ - "sshkey": ssh_key - }) - - position_ise_creds += 1 - - external_cisco_ise_ip_addr_dtos = auth_policy_server \ - .get("external_cisco_ise_ip_addr_dtos") - if external_cisco_ise_ip_addr_dtos: - auth_server.update({"externalCiscoIseIpAddrDtos": []}) - position_ise_addresses = 0 - for external_cisco_ise in external_cisco_ise_ip_addr_dtos: - external_cisco_ise_ip_addresses = external_cisco_ise \ - .get("external_cisco_ise_ip_addresses") - if external_cisco_ise_ip_addresses: - auth_server.get("externalCiscoIseIpAddrDtos").append({}) - auth_server.get("externalCiscoIseIpAddrDtos")[position_ise_addresses] \ - .update({"externalCiscoIseIpAddresses": []}) - position_ise_address = 0 - for external_ip_address in external_cisco_ise_ip_addresses: - auth_server.get("externalCiscoIseIpAddrDtos")[position_ise_addresses] \ - .get("externalCiscoIseIpAddresses").append({}) - auth_server.get("externalCiscoIseIpAddrDtos")[position_ise_addresses] \ - .get("externalCiscoIseIpAddresses")[position_ise_address].update({ - "externalIpAddress": external_ip_address.get("external_ip_address") - }) - position_ise_address += 1 - ise_type = external_cisco_ise.get("ise_type") - if ise_type: - auth_server.get("externalCiscoIseIpAddrDtos")[position_ise_addresses] \ - .update({"type": ise_type}) - position_ise_addresses += 1 - - self.log("Authentication and Policy Server playbook details: {0}" - .format(auth_server), "DEBUG") - self.want.update({"authenticationPolicyServer": auth_server}) - self.msg = "Collecting the Authentication and Policy Server details from the playbook" - self.status = "success" - return self - - def get_want(self, config): - """ - Get all the Authentication Policy Server related information from playbook - - Parameters: - config (list of dict) - Playbook details - - Returns: - None - """ - - if config.get("authentication_policy_server"): - auth_policy_server = config.get("authentication_policy_server") - self.get_want_authentication_policy_server(auth_policy_server).check_return_status() - - self.log("Desired State (want): {0}".format(self.want), "INFO") - self.msg = "Successfully retrieved details from the playbook" - self.status = "success" - return self - - def accept_cisco_ise_server_certificate(self, ipAddress): - """ - Accept the Cisco ISE server certificate in Cisco Catalyst - Center provided in the playbook. - - Parameters: - ipAddress (str) - The Ip address of the Authentication and Policy Server to be deleted. - - Returns: - None - """ - - try: - AuthServer = self.auth_server_exists(ipAddress) - if not AuthServer: - self.log(str("Error while retrieving the Authentication and Policy Server {0} \ - details.".format(ipAddress)), "CRITICAL") - self.msg = "Error while retrieving the Authentication and Policy Server {0} \ - details.".format(ipAddress) - self.status = "failed" - return self - - cisco_ise_id = AuthServer.get("id") - if not cisco_ise_id: - self.log(str("Error while retrieving ht Authentication and Policy Server {0} id." - .format(ipAddress)), "CRITICAL") - self.msg = "Error while retrieving ht Authentication and Policy Server {0} id." 
\
-                     .format(ipAddress)
-                 self.status = "failed"
-                 return self
- 
-             response = self.dnac._exec(
-                 family="system_settings",
-                 function="accept_cisco_ise_server_certificate_for_cisco_ise_server_integration",
-                 params={
-                     "id": cisco_ise_id,
-                     "isCertAcceptedByUser": True
-                 },
-             )
-             self.log("Received API response for 'accept_cisco_ise_server_certificate_"
-                      "for_cisco_ise_server_integration': {0}".format(response), "DEBUG")
-         except Exception as msg:
-             self.log("Exception occurred while accepting the certificate of {0}: {1}"
-                      .format(ipAddress, msg), "ERROR")
-             return None
-         return
- 
-     def update_auth_policy_server(self, ipAddress):
-         """
-         Update/Create Authentication and Policy Server in Cisco
-         Catalyst Center with fields provided in playbook.
- 
-         Parameters:
-             ipAddress (str) - The Ip address of the Authentication and Policy Server to be created or updated.
- 
-         Returns:
-             None
-         """
- 
-         result_auth_server = self.result.get("response")[0].get("authenticationPolicyServer")
-         result_auth_server.get("response").update({ipAddress: {}})
- 
-         # Check whether the Authentication and Policy Server exists; if not, create it and return
-         if not self.have.get("authenticationPolicyServer").get("exists"):
-             auth_server_params = self.want.get("authenticationPolicyServer")
-             self.log("Desired State for Authentication and Policy Server (want): {0}"
-                      .format(auth_server_params), "DEBUG")
-             response = self.dnac._exec(
-                 family="system_settings",
-                 function="add_authentication_and_policy_server_access_configuration",
-                 params=auth_server_params,
-             )
-             if not self.want.get("authenticationPolicyServer").get("isIseEnabled"):
-                 validation_string = "successfully created aaa settings"
-             else:
-                 validation_string = "operation sucessful"
-             self.check_task_response_status(response, validation_string).check_return_status()
-             self.accept_cisco_ise_server_certificate(ipAddress)
-             self.log("Successfully created Authentication and Policy Server '{0}'."
- .format(ipAddress), "INFO") - result_auth_server.get("response").get(ipAddress) \ - .update({ - "authenticationPolicyServer Details": self.want - .get("authenticationPolicyServer") - }) - result_auth_server.get("msg").update({ - ipAddress: "Authentication and Policy Server Created Successfully" - }) - return - - # Authentication and Policy Server exists, check update is required - # Edit API not working, remove this - if not self.requires_update(self.have.get("authenticationPolicyServer").get("details"), - self.want.get("authenticationPolicyServer"), - self.authentication_policy_server_obj_params): - self.log("Authentication and Policy Server '{0}' doesn't require an update" - .format(ipAddress), "INFO") - result_auth_server.get("response").get(ipAddress).update({ - "Cisco Catalyst Center params": - self.have.get("authenticationPolicyServer").get("details") - }) - result_auth_server.get("response").get(ipAddress).update({ - "Id": self.have.get("authenticationPolicyServer").get("id") - }) - result_auth_server.get("msg").update({ - ipAddress: "Authentication and Policy Server doesn't require an update" - }) - return - - self.log("Authentication and Policy Server requires update", "DEBUG") - - # Authenticaiton and Policy Server Exists - auth_server_params = copy.deepcopy(self.want.get("authenticationPolicyServer")) - auth_server_params.update({"id": self.have.get("authenticationPolicyServer").get("id")}) - self.log("Desired State for Authentication and Policy Server (want): {0}" - .format(auth_server_params), "DEBUG") - self.log("Current State for Authentication and Policy Server (have): {0}" - .format(self.have.get("authenticationPolicyServer").get("details")), "DEBUG") - response = self.dnac._exec( - family="system_settings", - function="edit_authentication_and_policy_server_access_configuration", - params=auth_server_params, - ) - - self.check_execution_response_status(response).check_return_status() - self.log("Authentication and Policy Server '{0}' updated successfully" - .format(ipAddress), "INFO") - result_auth_server.get("response").get(ipAddress) \ - .update({"Id": self.have.get("authenticationPolicyServer").get("id")}) - result_auth_server.get("msg").update({ - ipAddress: "Authentication and Policy Server Updated Successfully" - }) - return - - def get_diff_merged(self, config): - """ - Update or create Authentication and Policy Server in - Cisco Catalyst Center based on the playbook details. - - Parameters: - config (list of dict) - Playbook details containing - Authentication and Policy Server information. - - Returns: - self - """ - - if config.get("authentication_policy_server") is not None: - ipAddress = config.get("authentication_policy_server").get("server_ip_address") - self.update_auth_policy_server(ipAddress) - - return self - - def delete_auth_policy_server(self, ipAddress): - """ - Delete a Authentication and Policy Server by server Ip address in Cisco Catalyst Center. - - Parameters: - ipAddress (str) - The Ip address of the Authentication and Policy Server to be deleted. - - Returns: - self - """ - - auth_server_exists = self.have.get("authenticationPolicyServer").get("exists") - result_auth_server = self.result.get("response")[0].get("authenticationPolicyServer") - if not auth_server_exists: - result_auth_server.get("response").update({ - ipAddress: "Authentication and Policy Server not found" - }) - self.msg = "Authentication and Policy Server not found." 
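# The requires_update() call above is what decides between the add and edit API paths.
# A hedged sketch of that comparison, under the assumption (suggested by the
# authentication_policy_server_obj_params argument) that it walks a list of
# (want_key, have_key) pairs; the function below is illustrative, not the module's method.
def requires_update(have, want, obj_params):
    """Return True when any requested (want) value differs from the current (have) value."""
    for want_key, have_key in obj_params:
        requested = want.get(want_key)
        if requested is not None and requested != have.get(have_key):
            return True
    return False

obj_params = [("protocol", "protocol"), ("retries", "retries"), ("timeoutSeconds", "timeoutSeconds")]
have = {"protocol": "RADIUS", "retries": "3", "timeoutSeconds": "4"}
want = {"protocol": "RADIUS_TACACS", "retries": "3"}
print(requires_update(have, want, obj_params))  # True -> the edit API would be called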
- self.status = "success" - return self - - response = self.dnac._exec( - family="system_settings", - function="delete_authentication_and_policy_server_access_configuration", - params={"id": self.have.get("authenticationPolicyServer").get("id")}, - ) - - self.log("Received API response for 'delete_authentication_and_" - "policy_server_access_configuration': {0}".format(response), "DEBUG") - # Check the task status - validation_string = "successfully deleted aaa settings" - self.check_task_response_status(response, validation_string).check_return_status() - taskid = response.get("response").get("taskId") - # Update result information - result_auth_server.get("response").update({ipAddress: {}}) - result_auth_server.get("response").get(ipAddress).update({"Task Id": taskid}) - result_auth_server.get("msg").update({ - ipAddress: "Authentication and Policy Server deleted successfully." - }) - self.msg = "Authentication and Policy Server - {0} deleted successfully.".format(ipAddress) - self.status = "success" - return self - - def get_diff_deleted(self, config): - """ - Delete Authentication and Policy Server from the Cisco Catalyst Center. - - Parameters: - config (list of dict) - Playbook details - - Returns: - self - """ - - if config.get("authentication_policy_server") is not None: - ipAddress = config.get("authentication_policy_server").get("server_ip_address") - self.delete_auth_policy_server(ipAddress).check_return_status() - - return self - - def verify_diff_merged(self, config): - """ - Validating the Cisco Catalyst Center configuration with the playbook details - when state is merged (Create/Update). - - Parameters: - config (dict) - Playbook details containing - Authentication and Policy Server configuration. - - Returns: - self - """ - - self.get_have(config) - self.log("Current State (have): {0}".format(self.have), "INFO") - self.log("Requested State (want): {0}".format(self.want), "INFO") - if config.get("authentication_policy_server") is not None: - self.log("Desired State of Authentication and Policy Server (want): {0}" - .format(self.want.get("authenticationPolicyServer")), "DEBUG") - self.log("Current State of Authentication and Policy Server (have): {0}" - .format(self.have.get("authenticationPolicyServer") - .get("details")), "DEBUG") - check_list = ["isIseEnabled", "ipAddress", "pxgridEnabled", - "useDnacCertForPxgrid", "port", "protocol", - "retries", "role", "timeoutSeconds", "encryptionScheme"] - auth_server_have = self.have.get("authenticationPolicyServer").get("details") - auth_server_want = self.want.get("authenticationPolicyServer") - for item in check_list: - if auth_server_have.get(item) and auth_server_want.get(item) and \ - auth_server_have.get(item) != auth_server_want.get(item): - self.msg = "Authentication and Policy Server " + \ - "Config is not applied to the Cisco Catalyst Center." - self.status = "failed" - return self - - self.log("Successfully validated Authentication and Policy Server '{0}'." - .format(self.want.get("authenticationPolicyServer").get("ipAddress")), "INFO") - self.result.get("response")[0].get("authenticationPolicyServer").update({ - "Validation": "Success" - }) - - self.msg = "Successfully validated the Authentication and Policy Server." - self.status = "success" - return self - - def verify_diff_deleted(self, config): - """ - Validating the Cisco Catalyst Center configuration with the playbook details - when state is deleted (delete). 
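# The create and delete paths above hand the API response to check_task_response_status()
# and wait for a progress string such as "successfully deleted aaa settings". A simplified,
# hypothetical sketch of that polling loop: 'client' stands in for the SDK wrapper used by
# the module, the 'task_id'/'progress' field names and timing values are assumptions, and
# error handling is reduced to the essentials.
import time

def wait_for_task(client, task_id, validation_string, timeout=300, interval=2):
    """Poll the task API until the expected progress text appears or the task errors out."""
    deadline = time.time() + timeout
    while time.time() < deadline:
        task = client._exec(family="task", function="get_task_by_id",
                            params={"task_id": task_id}).get("response", {})
        if task.get("isError"):
            raise RuntimeError("Task {0} failed: {1}".format(task_id, task.get("failureReason")))
        if validation_string in (task.get("progress") or "").lower():
            return task
        time.sleep(interval)
    raise TimeoutError("Task {0} did not finish within {1} seconds".format(task_id, timeout))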
- - Parameters: - config (dict) - Playbook details containing - Authentication and Policy Server configuration. - - Returns: - self - """ - - self.get_have(config) - ipAddress = config.get("authentication_policy_server").get("server_ip_address") - self.log("Current State (have): {0}".format(self.have), "INFO") - self.log("Authentication and Policy Server deleted from the Cisco Catalyst Center: {0}" - .format(ipAddress), "INFO") - if config.get("authentication_policy_server") is not None: - auth_server_exists = self.have.get("authenticationPolicyServer").get("exists") - if auth_server_exists: - self.msg = "Authentication and Policy Server " + \ - "Config is not applied to the Cisco Catalyst Center." - self.status = "failed" - return self - - self.log("Successfully validated absence of Authentication and Policy Server '{0}'." - .format(config.get("authentication_policy_server").get("ip_address")), "INFO") - self.result.get("response")[0].get("authenticationPolicyServer").update({ - "Validation": "Success" - }) - - self.msg = "Successfully validated the absence of Authentication and Policy Server." - self.status = "success" - return self - - def reset_values(self): - """ - Reset all neccessary attributes to default values - - Parameters: - None - - Returns: - None - """ - - self.have.clear() - self.want.clear() - return - - -def main(): - """main entry point for module execution""" - - # Define the specification for module arguments - element_spec = { - "dnac_host": {"type": 'str', "required": True}, - "dnac_port": {"type": 'str', "default": '443'}, - "dnac_username": {"type": 'str', "default": 'admin', "aliases": ['user']}, - "dnac_password": {"type": 'str', "no_log": True}, - "dnac_verify": {"type": 'bool', "default": 'True'}, - "dnac_version": {"type": 'str', "default": '2.2.3.3'}, - "dnac_debug": {"type": 'bool', "default": False}, - "dnac_log": {"type": 'bool', "default": False}, - "dnac_log_level": {"type": 'str', "default": 'WARNING'}, - "dnac_log_file_path": {"type": 'str', "default": 'dnac.log'}, - "dnac_log_append": {"type": 'bool', "default": True}, - "config_verify": {"type": 'bool', "default": False}, - "config": {"type": 'list', "required": True, "elements": 'dict'}, - "state": {"default": 'merged', "choices": ['merged', 'deleted']}, - "validate_response_schema": {"type": 'bool', "default": True}, - } - - # Create an AnsibleModule object with argument specifications - module = AnsibleModule(argument_spec=element_spec, supports_check_mode=False) - ccc_ise_radius = IseRadiusIntegration(module) - state = ccc_ise_radius.params.get("state") - config_verify = ccc_ise_radius.params.get("config_verify") - if state not in ccc_ise_radius.supported_states: - ccc_ise_radius.status = "invalid" - ccc_ise_radius.msg = "State {0} is invalid".format(state) - ccc_ise_radius.check_return_status() - - ccc_ise_radius.validate_input().check_return_status() - - for config in ccc_ise_radius.config: - ccc_ise_radius.reset_values() - ccc_ise_radius.get_have(config).check_return_status() - if state != "deleted": - ccc_ise_radius.get_want(config).check_return_status() - ccc_ise_radius.get_diff_state_apply[state](config).check_return_status() - if config_verify: - ccc_ise_radius.verify_diff_state_apply[state](config).check_return_status() - - module.exit_json(**ccc_ise_radius.result) - - -if __name__ == "__main__": - main() From 4b39501161f1ae6352a8687667453a56f04c6c96 Mon Sep 17 00:00:00 2001 From: Abinash Date: Wed, 27 Mar 2024 16:14:04 +0000 Subject: [PATCH 11/18] Adding method to check valid ip address 
--- plugins/modules/discovery_intent.py | 16 ++++++++-------- plugins/modules/discovery_workflow_manager.py | 16 ++++++++-------- 2 files changed, 16 insertions(+), 16 deletions(-) diff --git a/plugins/modules/discovery_intent.py b/plugins/modules/discovery_intent.py index 41f54ae993..bbe0634883 100644 --- a/plugins/modules/discovery_intent.py +++ b/plugins/modules/discovery_intent.py @@ -721,7 +721,7 @@ def validate_input(self, state=None): self.status = "success" return self - def validate_ip_address_list(self): + def validate_ip4_address_list(self): """ Validates each ip adress paased in the IP_address_list passed by the user before preprocessing it """ @@ -733,11 +733,11 @@ def validate_ip_address_list(self): if '-' in ip: if len(ip.split('-')) == 2: ip1, ip2 = ip.split('-') - if self.is_valid_ip(ip1) is False: + if self.is_valid_ipv4(ip1) is False: msg = "IP address {0} is not valid".format(ip1) self.log(msg, "CRITICAL") self.module.fail_json(msg=msg) - if self.is_valid_ip(ip2) is False: + if self.is_valid_ipv4(ip2) is False: msg = "IP address {0} is not valid".format(ip2) self.log(msg, "CRITICAL") self.module.fail_json(msg=msg) @@ -745,18 +745,18 @@ def validate_ip_address_list(self): ip2_parts = list(map(int, ip2.split('.'))) for part in range(4): if ip1_parts[part] > ip2_parts[part]: - msg = "Incorrect range passed. Please pass correct IP address range" + msg = "Incorrect range passed: {0}. Please pass correct IP address range".format(ip) self.log(msg, "CRITICAL") self.module.fail_json(msg=msg) else: - msg = "IP address range should have only upper and lower limit values" + msg = "Provided range '{0}' is incorrect. IP address range should have only upper and lower limit values".format(ip) self.log(msg, "CRITICAL") self.module.fail_json(msg=msg) - if self.is_valid_ip(ip) is False and '-' not in ip: + if self.is_valid_ipv4(ip) is False and '-' not in ip: msg = "IP address {0} is not valid".format(ip) self.log(msg, "CRITICAL") self.module.fail_json(msg=msg) - self.log("All the IP adresses passed are correct", "INFO") + self.log("All the IP addresses passed are correct", "INFO") def get_creds_ids_list(self): """ @@ -1550,7 +1550,7 @@ def get_diff_merged(self): - self: The instance of the class with updated attributes. 
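# A standalone sketch of the lower/upper bound check that validate_ip4_address_list()
# performs on entries like "10.0.2.5-10.0.10.1". Comparing the octets as whole tuples
# keeps the ordering test correct even when a later octet of the lower bound is larger
# than the corresponding octet of the upper bound. Illustrative only; not the module code.
def is_valid_ipv4_range(ip_range):
    """Return True when 'a.b.c.d-e.f.g.h' is well formed and lower <= upper."""
    parts = ip_range.split("-")
    if len(parts) != 2:
        return False
    try:
        lower = tuple(int(octet) for octet in parts[0].split("."))
        upper = tuple(int(octet) for octet in parts[1].split("."))
    except ValueError:
        return False
    if len(lower) != 4 or len(upper) != 4:
        return False
    if any(not 0 <= octet <= 255 for octet in lower + upper):
        return False
    return lower <= upper

print(is_valid_ipv4_range("10.0.2.5-10.0.10.1"))  # True
print(is_valid_ipv4_range("10.0.10.1-10.0.2.5"))  # False (upper bound below lower bound)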
""" - self.validate_ip_address_list() + self.validate_ip4_address_list() devices_list_info = self.get_devices_list_info() ip_address_list = self.preprocess_device_discovery(devices_list_info) exist_discovery = self.get_exist_discovery() diff --git a/plugins/modules/discovery_workflow_manager.py b/plugins/modules/discovery_workflow_manager.py index da895f33c9..66667260da 100644 --- a/plugins/modules/discovery_workflow_manager.py +++ b/plugins/modules/discovery_workflow_manager.py @@ -721,7 +721,7 @@ def validate_input(self, state=None): self.status = "success" return self - def validate_ip_address_list(self): + def validate_ip4_address_list(self): """ Validates each ip adress paased in the IP_address_list passed by the user before preprocessing it """ @@ -733,11 +733,11 @@ def validate_ip_address_list(self): if '-' in ip: if len(ip.split('-')) == 2: ip1, ip2 = ip.split('-') - if self.is_valid_ip(ip1) is False: + if self.is_valid_ipv4(ip1) is False: msg = "IP address {0} is not valid".format(ip1) self.log(msg, "CRITICAL") self.module.fail_json(msg=msg) - if self.is_valid_ip(ip2) is False: + if self.is_valid_ipv4(ip2) is False: msg = "IP address {0} is not valid".format(ip2) self.log(msg, "CRITICAL") self.module.fail_json(msg=msg) @@ -745,18 +745,18 @@ def validate_ip_address_list(self): ip2_parts = list(map(int, ip2.split('.'))) for part in range(4): if ip1_parts[part] > ip2_parts[part]: - msg = "Incorrect range passed. Please pass correct IP address range" + msg = "Incorrect range passed: {0}. Please pass correct IP address range".format(ip) self.log(msg, "CRITICAL") self.module.fail_json(msg=msg) else: - msg = "IP address range should have only upper and lower limit values" + msg = "Provided range '{0}' is incorrect. IP address range should have only upper and lower limit values".format(ip) self.log(msg, "CRITICAL") self.module.fail_json(msg=msg) - if self.is_valid_ip(ip) is False and '-' not in ip: + if self.is_valid_ipv4(ip) is False and '-' not in ip: msg = "IP address {0} is not valid".format(ip) self.log(msg, "CRITICAL") self.module.fail_json(msg=msg) - self.log("All the IP adresses passed are correct", "INFO") + self.log("All the IP addresses passed are correct", "INFO") def get_creds_ids_list(self): """ @@ -1550,7 +1550,7 @@ def get_diff_merged(self): - self: The instance of the class with updated attributes. """ - self.validate_ip_address_list() + self.validate_ip4_address_list() devices_list_info = self.get_devices_list_info() ip_address_list = self.preprocess_device_discovery(devices_list_info) exist_discovery = self.get_exist_discovery() From 3071e444dfdc2224c79434eeae6cb75e5f1208ee Mon Sep 17 00:00:00 2001 From: Abinash Date: Wed, 27 Mar 2024 16:15:41 +0000 Subject: [PATCH 12/18] Adding method to check valid ip address --- plugins/module_utils/dnac.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plugins/module_utils/dnac.py b/plugins/module_utils/dnac.py index ec190d6123..8064cc5e76 100644 --- a/plugins/module_utils/dnac.py +++ b/plugins/module_utils/dnac.py @@ -486,7 +486,7 @@ def update_site_type_key(self, config): return new_config - def is_valid_ip(self, ip_address): + def is_valid_ipv4(self, ip_address): """ Validates an IPv4 address. 
From 521fba69738435a1eb1f188ccebc40f6fbee9b6a Mon Sep 17 00:00:00 2001 From: Abhishek-121 Date: Thu, 28 Mar 2024 12:38:01 +0530 Subject: [PATCH 13/18] Add op_modifies=True in Site, SWIM and Inventory workflow as well as in intent modules --- plugins/modules/inventory_intent.py | 20 +++++++++++++++++++ plugins/modules/inventory_workflow_manager.py | 20 +++++++++++++++++++ plugins/modules/site_intent.py | 3 +++ plugins/modules/site_workflow_manager.py | 3 +++ plugins/modules/swim_intent.py | 6 ++++++ plugins/modules/swim_workflow_manager.py | 6 ++++++ 6 files changed, 58 insertions(+) diff --git a/plugins/modules/inventory_intent.py b/plugins/modules/inventory_intent.py index 675c11c918..54161725b2 100644 --- a/plugins/modules/inventory_intent.py +++ b/plugins/modules/inventory_intent.py @@ -913,6 +913,7 @@ def is_udf_exist(self, field_name): response = self.dnac._exec( family="devices", function='get_all_user_defined_fields', + op_modifies=True, params={"name": field_name}, ) @@ -943,6 +944,7 @@ def create_user_defined_field(self, udf): response = self.dnac._exec( family="devices", function='create_user_defined_field', + op_modifies=True, params=udf, ) self.log("Received API response from 'create_user_defined_field': {0}".format(str(response)), "DEBUG") @@ -986,6 +988,7 @@ def add_field_to_devices(self, device_ids, udf): response = self.dnac._exec( family="devices", function='add_user_defined_field_to_device', + op_modifies=True, params=udf_param_dict, ) self.log("Received API response from 'add_user_defined_field_to_device': {0}".format(str(response)), "DEBUG") @@ -1226,6 +1229,7 @@ def get_ap_devices(self, device_ips): response = self.dnac._exec( family="devices", function='get_device_list', + op_modifies=True, params={"managementIpAddress": device_ip} ) response = response.get('response', []) @@ -1360,6 +1364,7 @@ def reboot_access_points(self): response = self.dnac._exec( family="devices", function='get_device_list', + op_modifies=True, params={"managementIpAddress": device_ip} ) response = response.get('response') @@ -1905,6 +1910,7 @@ def get_udf_id(self, field_name): response = self.dnac._exec( family="devices", function='get_all_user_defined_fields', + op_modifies=True, params={"name": field_name}, ) self.log("Received API response from 'get_all_user_defined_fields': {0}".format(str(response)), "DEBUG") @@ -2091,6 +2097,7 @@ def get_device_ids(self, device_ips): response = self.dnac._exec( family="devices", function='get_device_list', + op_modifies=True, params={"managementIpAddress": device_ip} ) @@ -2127,6 +2134,7 @@ def get_device_ips_from_hostname(self, hostname_list): response = self.dnac._exec( family="devices", function='get_device_list', + op_modifies=True, params={"hostname": hostname} ) if response: @@ -2161,6 +2169,7 @@ def get_device_ips_from_serial_number(self, serial_number_list): response = self.dnac._exec( family="devices", function='get_device_list', + op_modifies=True, params={"serialNumber": serial_number} ) if response: @@ -2195,6 +2204,7 @@ def get_device_ips_from_mac_address(self, mac_address_list): response = self.dnac._exec( family="devices", function='get_device_list', + op_modifies=True, params={"macAddress": mac_address} ) if response: @@ -2233,6 +2243,7 @@ def get_interface_from_id_and_name(self, device_id, interface_name): response = self.dnac._exec( family="devices", function='get_interface_details', + op_modifies=True, params=interface_detail_params ) self.log("Received API response from 'get_interface_details': {0}".format(str(response)), "DEBUG") @@ 
-2270,6 +2281,7 @@ def get_interface_from_ip(self, device_ip): response = self.dnac._exec( family="devices", function='get_interface_by_ip', + op_modifies=True, params={"ip_address": device_ip} ) self.log("Received API response from 'get_interface_by_ip': {0}".format(str(response)), "DEBUG") @@ -2302,6 +2314,7 @@ def get_device_response(self, device_ip): response = self.dnac._exec( family="devices", function='get_device_list', + op_modifies=True, params={"managementIpAddress": device_ip} ) response = response.get('response')[0] @@ -2360,6 +2373,7 @@ def check_interface_details(self, device_ip, interface_name): response = self.dnac._exec( family="devices", function='get_interface_details', + op_modifies=True, params=interface_detail_params ) self.log("Received API response from 'get_interface_details': {0}".format(str(response)), "DEBUG") @@ -2737,6 +2751,7 @@ def is_device_exist_in_ccc(self, device_ip): response = self.dnac._exec( family="devices", function='get_device_list', + op_modifies=True, params={"managementIpAddress": device_ip} ) response = response.get('response') @@ -2982,6 +2997,7 @@ def get_diff_merged(self, config): response = self.dnac._exec( family="devices", function='get_device_list', + op_modifies=True, params={"managementIpAddress": device_ip} ) response = response.get('response')[0] @@ -3306,6 +3322,7 @@ def get_diff_deleted(self, config): response = self.dnac._exec( family="devices", function='delete_user_defined_field', + op_modifies=True, params={"id": udf_id}, ) if response and isinstance(response, dict): @@ -3356,6 +3373,7 @@ def get_diff_deleted(self, config): prov_respone = self.dnac._exec( family="sda", function='get_provisioned_wired_device', + op_modifies=True, params=provision_params, ) @@ -3363,6 +3381,7 @@ def get_diff_deleted(self, config): response = self.dnac._exec( family="sda", function='delete_provisioned_wired_device', + op_modifies=True, params=provision_params, ) executionid = response.get("executionId") @@ -3389,6 +3408,7 @@ def get_diff_deleted(self, config): response = self.dnac._exec( family="devices", function='delete_device_by_id', + op_modifies=True, params=delete_params, ) diff --git a/plugins/modules/inventory_workflow_manager.py b/plugins/modules/inventory_workflow_manager.py index 3eda0e2ccb..d42fe45441 100644 --- a/plugins/modules/inventory_workflow_manager.py +++ b/plugins/modules/inventory_workflow_manager.py @@ -912,6 +912,7 @@ def is_udf_exist(self, field_name): response = self.dnac._exec( family="devices", function='get_all_user_defined_fields', + op_modifies=True, params={"name": field_name}, ) @@ -942,6 +943,7 @@ def create_user_defined_field(self, udf): response = self.dnac._exec( family="devices", function='create_user_defined_field', + op_modifies=True, params=udf, ) self.log("Received API response from 'create_user_defined_field': {0}".format(str(response)), "DEBUG") @@ -984,6 +986,7 @@ def add_field_to_devices(self, device_ids, udf): response = self.dnac._exec( family="devices", function='add_user_defined_field_to_device', + op_modifies=True, params=udf_param_dict, ) self.log("Received API response from 'add_user_defined_field_to_device': {0}".format(str(response)), "DEBUG") @@ -1224,6 +1227,7 @@ def get_ap_devices(self, device_ips): response = self.dnac._exec( family="devices", function='get_device_list', + op_modifies=True, params={"managementIpAddress": device_ip} ) response = response.get('response', []) @@ -1358,6 +1362,7 @@ def reboot_access_points(self): response = self.dnac._exec( family="devices", 
function='get_device_list', + op_modifies=True, params={"managementIpAddress": device_ip} ) response = response.get('response') @@ -1897,6 +1902,7 @@ def get_udf_id(self, field_name): response = self.dnac._exec( family="devices", function='get_all_user_defined_fields', + op_modifies=True, params={"name": field_name}, ) self.log("Received API response from 'get_all_user_defined_fields': {0}".format(str(response)), "DEBUG") @@ -2084,6 +2090,7 @@ def get_device_ids(self, device_ips): response = self.dnac._exec( family="devices", function='get_device_list', + op_modifies=True, params={"managementIpAddress": device_ip} ) @@ -2120,6 +2127,7 @@ def get_device_ips_from_hostname(self, hostname_list): response = self.dnac._exec( family="devices", function='get_device_list', + op_modifies=True, params={"hostname": hostname} ) if response: @@ -2154,6 +2162,7 @@ def get_device_ips_from_serial_number(self, serial_number_list): response = self.dnac._exec( family="devices", function='get_device_list', + op_modifies=True, params={"serialNumber": serial_number} ) if response: @@ -2188,6 +2197,7 @@ def get_device_ips_from_mac_address(self, mac_address_list): response = self.dnac._exec( family="devices", function='get_device_list', + op_modifies=True, params={"macAddress": mac_address} ) if response: @@ -2226,6 +2236,7 @@ def get_interface_from_id_and_name(self, device_id, interface_name): response = self.dnac._exec( family="devices", function='get_interface_details', + op_modifies=True, params=interface_detail_params ) self.log("Received API response from 'get_interface_details': {0}".format(str(response)), "DEBUG") @@ -2263,6 +2274,7 @@ def get_interface_from_ip(self, device_ip): response = self.dnac._exec( family="devices", function='get_interface_by_ip', + op_modifies=True, params={"ip_address": device_ip} ) self.log("Received API response from 'get_interface_by_ip': {0}".format(str(response)), "DEBUG") @@ -2295,6 +2307,7 @@ def get_device_response(self, device_ip): response = self.dnac._exec( family="devices", function='get_device_list', + op_modifies=True, params={"managementIpAddress": device_ip} ) response = response.get('response')[0] @@ -2353,6 +2366,7 @@ def check_interface_details(self, device_ip, interface_name): response = self.dnac._exec( family="devices", function='get_interface_details', + op_modifies=True, params=interface_detail_params ) self.log("Received API response from 'get_interface_details': {0}".format(str(response)), "DEBUG") @@ -2730,6 +2744,7 @@ def is_device_exist_in_ccc(self, device_ip): response = self.dnac._exec( family="devices", function='get_device_list', + op_modifies=True, params={"managementIpAddress": device_ip} ) response = response.get('response') @@ -2974,6 +2989,7 @@ def get_diff_merged(self, config): response = self.dnac._exec( family="devices", function='get_device_list', + op_modifies=True, params={"managementIpAddress": device_ip} ) response = response.get('response')[0] @@ -3299,6 +3315,7 @@ def get_diff_deleted(self, config): response = self.dnac._exec( family="devices", function='delete_user_defined_field', + op_modifies=True, params={"id": udf_id}, ) if response and isinstance(response, dict): @@ -3349,6 +3366,7 @@ def get_diff_deleted(self, config): prov_respone = self.dnac._exec( family="sda", function='get_provisioned_wired_device', + op_modifies=True, params=provision_params, ) @@ -3356,6 +3374,7 @@ def get_diff_deleted(self, config): response = self.dnac._exec( family="sda", function='delete_provisioned_wired_device', + op_modifies=True, 
params=provision_params, ) executionid = response.get("executionId") @@ -3383,6 +3402,7 @@ def get_diff_deleted(self, config): response = self.dnac._exec( family="devices", function='delete_device_by_id', + op_modifies=True, params=delete_params, ) diff --git a/plugins/modules/site_intent.py b/plugins/modules/site_intent.py index eb6c0feefa..b9b5d8081a 100644 --- a/plugins/modules/site_intent.py +++ b/plugins/modules/site_intent.py @@ -489,6 +489,7 @@ def site_exists(self): response = self.dnac._exec( family="sites", function='get_site', + op_modifies=True, params={"name": self.want.get("site_name")}, ) @@ -883,6 +884,7 @@ def delete_single_site(self, site_id, site_name): response = self.dnac._exec( family="sites", function="delete_site", + op_modifies=True, params={"site_id": site_id}, ) @@ -947,6 +949,7 @@ def get_diff_deleted(self, config): mem_response = self.dnac._exec( family="sites", function="get_membership", + op_modifies=True, params={"site_id": site_id}, ) site_response = mem_response.get("site").get("response") diff --git a/plugins/modules/site_workflow_manager.py b/plugins/modules/site_workflow_manager.py index 1af1532b28..5a6e0bd0b1 100644 --- a/plugins/modules/site_workflow_manager.py +++ b/plugins/modules/site_workflow_manager.py @@ -488,6 +488,7 @@ def site_exists(self): response = self.dnac._exec( family="sites", function='get_site', + op_modifies=True, params={"name": self.want.get("site_name")}, ) @@ -882,6 +883,7 @@ def delete_single_site(self, site_id, site_name): response = self.dnac._exec( family="sites", function="delete_site", + op_modifies=True, params={"site_id": site_id}, ) @@ -946,6 +948,7 @@ def get_diff_deleted(self, config): mem_response = self.dnac._exec( family="sites", function="get_membership", + op_modifies=True, params={"site_id": site_id}, ) site_response = mem_response.get("site").get("response") diff --git a/plugins/modules/swim_intent.py b/plugins/modules/swim_intent.py index 08f78ac309..78954cbe92 100644 --- a/plugins/modules/swim_intent.py +++ b/plugins/modules/swim_intent.py @@ -583,6 +583,7 @@ def site_exists(self, site_name): response = self.dnac._exec( family="sites", function='get_site', + op_modifies=True, params={"name": site_name}, ) except Exception as e: @@ -617,6 +618,7 @@ def get_image_id(self, name): image_response = self.dnac._exec( family="software_image_management_swim", function='get_software_image_details', + op_modifies=True, params={"image_name": name}, ) self.log("Received API response from 'get_software_image_details': {0}".format(str(image_response)), "DEBUG") @@ -651,6 +653,7 @@ def get_image_name_from_id(self, image_id): image_response = self.dnac._exec( family="software_image_management_swim", function='get_software_image_details', + op_modifies=True, params={"image_uuid": image_id}, ) self.log("Received API response from 'get_software_image_details': {0}".format(str(image_response)), "DEBUG") @@ -686,6 +689,7 @@ def is_image_exist(self, name): image_response = self.dnac._exec( family="software_image_management_swim", function='get_software_image_details', + op_modifies=True, params={"image_name": name}, ) self.log("Received API response from 'get_software_image_details': {0}".format(str(image_response)), "DEBUG") @@ -713,6 +717,7 @@ def get_device_id(self, params): response = self.dnac._exec( family="devices", function='get_device_list', + op_modifies=True, params=params, ) self.log("Received API response from 'get_device_list': {0}".format(str(response)), "DEBUG") @@ -1305,6 +1310,7 @@ def get_device_ip_from_id(self, 
device_id): response = self.dnac._exec( family="devices", function='get_device_list', + op_modifies=True, params={"id": device_id} ) self.log("Received API response from 'get_device_list': {0}".format(str(response)), "DEBUG") diff --git a/plugins/modules/swim_workflow_manager.py b/plugins/modules/swim_workflow_manager.py index a147b40550..7937eb0c1e 100644 --- a/plugins/modules/swim_workflow_manager.py +++ b/plugins/modules/swim_workflow_manager.py @@ -569,6 +569,7 @@ def site_exists(self, site_name): response = self.dnac._exec( family="sites", function='get_site', + op_modifies=True, params={"name": site_name}, ) except Exception as e: @@ -603,6 +604,7 @@ def get_image_id(self, name): image_response = self.dnac._exec( family="software_image_management_swim", function='get_software_image_details', + op_modifies=True, params={"image_name": name}, ) self.log("Received API response from 'get_software_image_details': {0}".format(str(image_response)), "DEBUG") @@ -637,6 +639,7 @@ def get_image_name_from_id(self, image_id): image_response = self.dnac._exec( family="software_image_management_swim", function='get_software_image_details', + op_modifies=True, params={"image_uuid": image_id}, ) self.log("Received API response from 'get_software_image_details': {0}".format(str(image_response)), "DEBUG") @@ -672,6 +675,7 @@ def is_image_exist(self, name): image_response = self.dnac._exec( family="software_image_management_swim", function='get_software_image_details', + op_modifies=True, params={"image_name": name}, ) self.log("Received API response from 'get_software_image_details': {0}".format(str(image_response)), "DEBUG") @@ -699,6 +703,7 @@ def get_device_id(self, params): response = self.dnac._exec( family="devices", function='get_device_list', + op_modifies=True, params=params, ) self.log("Received API response from 'get_device_list': {0}".format(str(response)), "DEBUG") @@ -1291,6 +1296,7 @@ def get_device_ip_from_id(self, device_id): response = self.dnac._exec( family="devices", function='get_device_list', + op_modifies=True, params={"id": device_id} ) self.log("Received API response from 'get_device_list': {0}".format(str(response)), "DEBUG") From b51bf31fc3c4e413f176ab358f3c253ac4332fcb Mon Sep 17 00:00:00 2001 From: Abinash Date: Thu, 28 Mar 2024 12:11:30 +0000 Subject: [PATCH 14/18] Making OP_modifies as true --- playbooks/PnP.yml | 7 +++-- playbooks/PnP_Workflow_Manager_Playbook.yml | 2 +- playbooks/device_provision_workflow.yml | 10 +++---- plugins/modules/discovery_intent.py | 13 ++++++--- plugins/modules/discovery_workflow_manager.py | 13 ++++++--- plugins/modules/pnp_intent.py | 27 +++++++++++++------ plugins/modules/pnp_workflow_manager.py | 27 +++++++++++++------ plugins/modules/provision_intent.py | 8 ++++-- plugins/modules/provision_workflow_manager.py | 27 ++++++++++--------- 9 files changed, 89 insertions(+), 45 deletions(-) diff --git a/playbooks/PnP.yml b/playbooks/PnP.yml index 63bad68e09..a20885f5a0 100644 --- a/playbooks/PnP.yml +++ b/playbooks/PnP.yml @@ -66,7 +66,7 @@ template_name: "Ansible_PNP_Switch" image_name: cat9k_iosxe_npe.17.03.07.SPA.bin project_name: Onboarding Configuration - template_details: + template_params: hostname: SJC-Switch-1 interface: TwoGigabitEthernet1/0/2 device_info: @@ -108,4 +108,7 @@ - device_info: - serial_number: QD2425L8M7 #Will get deleted - serial_number: FTC2320E0HA #Doesn't exist in the inventory - - serial_number: FKC2310E0HB #Doesn't exist in the inventory \ No newline at end of file + - serial_number: FKC2310E0HB #Doesn't exist in 
the inventory + + + diff --git a/playbooks/PnP_Workflow_Manager_Playbook.yml b/playbooks/PnP_Workflow_Manager_Playbook.yml index 846ebf3a72..0f1ff25c1c 100644 --- a/playbooks/PnP_Workflow_Manager_Playbook.yml +++ b/playbooks/PnP_Workflow_Manager_Playbook.yml @@ -66,7 +66,7 @@ template_name: "Ansible_PNP_Switch" image_name: cat9k_iosxe_npe.17.03.07.SPA.bin project_name: Onboarding Configuration - template_details: + template_params: hostname: SJC-Switch-1 interface: TwoGigabitEthernet1/0/2 device_info: diff --git a/playbooks/device_provision_workflow.yml b/playbooks/device_provision_workflow.yml index 362556a09f..acb3249a54 100644 --- a/playbooks/device_provision_workflow.yml +++ b/playbooks/device_provision_workflow.yml @@ -16,23 +16,23 @@ dnac_port: "{{ dnac_port }}" dnac_version: "{{ dnac_version }}" dnac_debug: "{{ dnac_debug }}" - + tasks: - name: Provision a wired device to a site - cisco.dnac.workflow_manager: + cisco.dnac.provision_workflow_manager: <<: *dnac_login dnac_log: True state: merged config_verify: True config: - site_name_hierarchy: Global/USA/San Francisco/BGL_18 - management_ip_address: 204.1.1.1 + management_ip_address: 204.1.2.2 - name: Unprovision a wired device from a site - cisco.dnac.workflow_manager: + cisco.dnac.provision_workflow_manager: <<: *dnac_login dnac_log: True state: deleted config: - - management_ip_address: 204.1.1.1 + - management_ip_address: 204.1.2.2 diff --git a/plugins/modules/discovery_intent.py b/plugins/modules/discovery_intent.py index bbe0634883..33eabf254a 100644 --- a/plugins/modules/discovery_intent.py +++ b/plugins/modules/discovery_intent.py @@ -971,6 +971,7 @@ def get_ccc_global_credentials_v2_info(self): family="discovery", function='get_all_global_credentials_v2', params=self.validated_config[0].get('headers'), + op_modifies=True ) response = response.get('response') self.log("The Global credentials response from 'get all global credentials v2' API is {0}".format(str(response)), "DEBUG") @@ -1325,6 +1326,7 @@ def get_task_status(self, task_id=None): family="task", function='get_task_by_id', params=params, + op_modifies=True, ) response = response.response self.log("Task status for the task id {0} is {1}".format(str(task_id), str(response)), "INFO") @@ -1373,7 +1375,8 @@ def lookup_discovery_by_range_via_name(self): response_part = self.dnac_apply['exec']( family="discovery", function='get_discoveries_by_range', - params=params + params=params, + op_modifies=True, ) response["response"].extend(response_part["response"]) else: @@ -1386,7 +1389,8 @@ def lookup_discovery_by_range_via_name(self): response = self.dnac_apply['exec']( family="discovery", function='get_discoveries_by_range', - params=params + params=params, + op_modifies=True, ) self.log("Response of the get discoveries via range API is {0}".format(str(response)), "DEBUG") @@ -1462,6 +1466,7 @@ def get_discovery_device_info(self, discovery_id=None, task_id=None): family="discovery", function='get_discovered_network_devices_by_discovery_id', params=params, + op_modifies=True, ) devices = response.response @@ -1532,6 +1537,7 @@ def delete_exist_discovery(self, params): family="discovery", function="delete_discovery_by_id", params=params, + op_modifies=True, ) self.log("Response collected from API 'delete_discovery_by_id': {0}".format(str(response)), "DEBUG") @@ -1649,7 +1655,8 @@ def verify_diff_merged(self, config): response = self.dnac_apply['exec']( family="discovery", function='get_discovery_by_id', - params=params + params=params, + op_modifies=True, ) discovery_name = 
config.get('discovery_name') if response: diff --git a/plugins/modules/discovery_workflow_manager.py b/plugins/modules/discovery_workflow_manager.py index 66667260da..557327ba33 100644 --- a/plugins/modules/discovery_workflow_manager.py +++ b/plugins/modules/discovery_workflow_manager.py @@ -971,6 +971,7 @@ def get_ccc_global_credentials_v2_info(self): family="discovery", function='get_all_global_credentials_v2', params=self.validated_config[0].get('headers'), + op_modifies=True ) response = response.get('response') self.log("The Global credentials response from 'get all global credentials v2' API is {0}".format(str(response)), "DEBUG") @@ -1325,6 +1326,7 @@ def get_task_status(self, task_id=None): family="task", function='get_task_by_id', params=params, + op_modifies=True, ) response = response.response self.log("Task status for the task id {0} is {1}".format(str(task_id), str(response)), "INFO") @@ -1373,7 +1375,8 @@ def lookup_discovery_by_range_via_name(self): response_part = self.dnac_apply['exec']( family="discovery", function='get_discoveries_by_range', - params=params + params=params, + op_modifies=True, ) response["response"].extend(response_part["response"]) else: @@ -1386,7 +1389,8 @@ def lookup_discovery_by_range_via_name(self): response = self.dnac_apply['exec']( family="discovery", function='get_discoveries_by_range', - params=params + params=params, + op_modifies=True, ) self.log("Response of the get discoveries via range API is {0}".format(str(response)), "DEBUG") @@ -1462,6 +1466,7 @@ def get_discovery_device_info(self, discovery_id=None, task_id=None): family="discovery", function='get_discovered_network_devices_by_discovery_id', params=params, + op_modifies=True, ) devices = response.response @@ -1532,6 +1537,7 @@ def delete_exist_discovery(self, params): family="discovery", function="delete_discovery_by_id", params=params, + op_modifies=True, ) self.log("Response collected from API 'delete_discovery_by_id': {0}".format(str(response)), "DEBUG") @@ -1649,7 +1655,8 @@ def verify_diff_merged(self, config): response = self.dnac_apply['exec']( family="discovery", function='get_discovery_by_id', - params=params + params=params, + op_modifies=True, ) discovery_name = config.get('discovery_name') if response: diff --git a/plugins/modules/pnp_intent.py b/plugins/modules/pnp_intent.py index 3c71046a91..f0494fb588 100644 --- a/plugins/modules/pnp_intent.py +++ b/plugins/modules/pnp_intent.py @@ -429,6 +429,7 @@ def get_site_details(self): family="sites", function='get_site', params={"name": self.want.get("site_name")}, + op_modifies=True, ) except Exception: self.log("Exception occurred as site \ @@ -467,6 +468,7 @@ def get_site_type(self): family="sites", function='get_site', params={"name": self.want.get("site_name")}, + op_modifies=True, ) except Exception: self.log("Exception occurred as \ @@ -632,7 +634,7 @@ def get_claim_params(self): self.pnp_cred_failure(msg=msg) claim_params["rfProfile"] = self.validated_config[0]["rf_profile"] - self.log("Paramters used for claiming are {0}".format(str(claim_params)), "INFO") + self.log("Parameters used for claiming are {0}".format(str(claim_params)), "INFO") return claim_params def get_reset_params(self): @@ -698,7 +700,8 @@ def get_have(self): device_response = self.dnac_apply['exec']( family="device_onboarding_pnp", function='get_device_list', - params={"serial_number": self.want.get("serial_number")} + params={"serial_number": self.want.get("serial_number")}, + op_modifies=True, ) self.log("Device details for the device with serial \ 
number '{0}': {1}".format(self.want.get("serial_number"), str(device_response)), "DEBUG") @@ -721,6 +724,7 @@ def get_have(self): family="software_image_management_swim", function='get_software_image_details', params=self.want.get("image_params"), + op_modifies=True, ) image_list = image_response.get("response") self.log("Image details obtained from the API 'get_software_image_details': {0}".format(str(image_response)), "DEBUG") @@ -730,13 +734,15 @@ def get_have(self): family="configuration_templates", function='gets_the_templates_available', params={"project_names": self.want.get("project_name")}, + op_modifies=True, ) self.log("List of templates under the project '{0}': {1}".format(self.want.get("project_name"), str(template_list)), "DEBUG") dev_details_response = self.dnac_apply['exec']( family="device_onboarding_pnp", function="get_device_by_id", - params={"id": device_response[0].get("id")} + params={"id": device_response[0].get("id")}, + op_modifies=True, ) self.log("Device details retrieved after calling the 'get_device_by_id' API: {0}".format(str(dev_details_response)), "DEBUG") install_mode = dev_details_response.get("deviceInfo").get("mode") @@ -899,7 +905,8 @@ class instance for further use. multi_device_response = self.dnac_apply['exec']( family="device_onboarding_pnp", function='get_device_list', - params={"serial_number": device["deviceInfo"]["serialNumber"]} + params={"serial_number": device["deviceInfo"]["serialNumber"]}, + op_modifies=True, ) self.log("Device details for serial number {0} \ obtained from the API 'get_device_list': {1}".format(device["deviceInfo"]["serialNumber"], str(multi_device_response)), "DEBUG") @@ -1034,7 +1041,8 @@ class instance for further use. dev_details_response = self.dnac_apply['exec']( family="device_onboarding_pnp", function="get_device_by_id", - params={"id": self.have["device_id"]} + params={"id": self.have["device_id"]}, + op_modifies=True, ) self.log("Response from 'get_device_by_id' API for device details: {0}".format(str(dev_details_response)), "DEBUG") @@ -1133,7 +1141,8 @@ def get_diff_deleted(self): multi_device_response = self.dnac_apply['exec']( family="device_onboarding_pnp", function='get_device_list', - params={"serial_number": device["deviceInfo"]["serialNumber"]} + params={"serial_number": device["deviceInfo"]["serialNumber"]}, + op_modifies=True, ) self.log("Response from 'get_device_list' API for claiming: {0}".format(str(multi_device_response)), "DEBUG") if multi_device_response and len(multi_device_response) == 1: @@ -1190,7 +1199,8 @@ def verify_diff_merged(self, config): device_response = self.dnac_apply['exec']( family="device_onboarding_pnp", function='get_device_list', - params={"serial_number": device["deviceInfo"]["serialNumber"]} + params={"serial_number": device["deviceInfo"]["serialNumber"]}, + op_modifies=True, ) if (device_response and (len(device_response) == 1)): msg = ( @@ -1230,7 +1240,8 @@ def verify_diff_deleted(self, config): device_response = self.dnac_apply['exec']( family="device_onboarding_pnp", function='get_device_list', - params={"serial_number": device["deviceInfo"]["serialNumber"]} + params={"serial_number": device["deviceInfo"]["serialNumber"]}, + op_modifies=True, ) if not (device_response and (len(device_response) == 1)): msg = ( diff --git a/plugins/modules/pnp_workflow_manager.py b/plugins/modules/pnp_workflow_manager.py index e1b334f71e..3080f4028a 100644 --- a/plugins/modules/pnp_workflow_manager.py +++ b/plugins/modules/pnp_workflow_manager.py @@ -429,6 +429,7 @@ def 
get_site_details(self): family="sites", function='get_site', params={"name": self.want.get("site_name")}, + op_modifies=True, ) except Exception: self.log("Exception occurred as site \ @@ -467,6 +468,7 @@ def get_site_type(self): family="sites", function='get_site', params={"name": self.want.get("site_name")}, + op_modifies=True, ) except Exception: self.log("Exception occurred as \ @@ -632,7 +634,7 @@ def get_claim_params(self): self.pnp_cred_failure(msg=msg) claim_params["rfProfile"] = self.validated_config[0]["rf_profile"] - self.log("Paramters used for claiming are {0}".format(str(claim_params)), "INFO") + self.log("Parameters used for claiming are {0}".format(str(claim_params)), "INFO") return claim_params def get_reset_params(self): @@ -698,7 +700,8 @@ def get_have(self): device_response = self.dnac_apply['exec']( family="device_onboarding_pnp", function='get_device_list', - params={"serial_number": self.want.get("serial_number")} + params={"serial_number": self.want.get("serial_number")}, + op_modifies=True, ) self.log("Device details for the device with serial \ number '{0}': {1}".format(self.want.get("serial_number"), str(device_response)), "DEBUG") @@ -721,6 +724,7 @@ def get_have(self): family="software_image_management_swim", function='get_software_image_details', params=self.want.get("image_params"), + op_modifies=True, ) image_list = image_response.get("response") self.log("Image details obtained from the API 'get_software_image_details': {0}".format(str(image_response)), "DEBUG") @@ -730,13 +734,15 @@ def get_have(self): family="configuration_templates", function='gets_the_templates_available', params={"project_names": self.want.get("project_name")}, + op_modifies=True, ) self.log("List of templates under the project '{0}': {1}".format(self.want.get("project_name"), str(template_list)), "DEBUG") dev_details_response = self.dnac_apply['exec']( family="device_onboarding_pnp", function="get_device_by_id", - params={"id": device_response[0].get("id")} + params={"id": device_response[0].get("id")}, + op_modifies=True, ) self.log("Device details retrieved after calling the 'get_device_by_id' API: {0}".format(str(dev_details_response)), "DEBUG") install_mode = dev_details_response.get("deviceInfo").get("mode") @@ -899,7 +905,8 @@ class instance for further use. multi_device_response = self.dnac_apply['exec']( family="device_onboarding_pnp", function='get_device_list', - params={"serial_number": device["deviceInfo"]["serialNumber"]} + params={"serial_number": device["deviceInfo"]["serialNumber"]}, + op_modifies=True, ) self.log("Device details for serial number {0} \ obtained from the API 'get_device_list': {1}".format(device["deviceInfo"]["serialNumber"], str(multi_device_response)), "DEBUG") @@ -1034,7 +1041,8 @@ class instance for further use. 
            dev_details_response = self.dnac_apply['exec'](
                family="device_onboarding_pnp",
                function="get_device_by_id",
-               params={"id": self.have["device_id"]}
+               params={"id": self.have["device_id"]},
+               op_modifies=True,
            )

            self.log("Response from 'get_device_by_id' API for device details: {0}".format(str(dev_details_response)), "DEBUG")
@@ -1133,7 +1141,8 @@ def get_diff_deleted(self):
            multi_device_response = self.dnac_apply['exec'](
                family="device_onboarding_pnp",
                function='get_device_list',
-               params={"serial_number": device["deviceInfo"]["serialNumber"]}
+               params={"serial_number": device["deviceInfo"]["serialNumber"]},
+               op_modifies=True,
            )
            self.log("Response from 'get_device_list' API for claiming: {0}".format(str(multi_device_response)), "DEBUG")
            if multi_device_response and len(multi_device_response) == 1:
@@ -1190,7 +1199,8 @@ def verify_diff_merged(self, config):
            device_response = self.dnac_apply['exec'](
                family="device_onboarding_pnp",
                function='get_device_list',
-               params={"serial_number": device["deviceInfo"]["serialNumber"]}
+               params={"serial_number": device["deviceInfo"]["serialNumber"]},
+               op_modifies=True,
            )
            if (device_response and (len(device_response) == 1)):
                msg = (
@@ -1230,7 +1240,8 @@ def verify_diff_deleted(self, config):
            device_response = self.dnac_apply['exec'](
                family="device_onboarding_pnp",
                function='get_device_list',
-               params={"serial_number": device["deviceInfo"]["serialNumber"]}
+               params={"serial_number": device["deviceInfo"]["serialNumber"]},
+               op_modifies=True,
            )
            if not (device_response and (len(device_response) == 1)):
                msg = (
diff --git a/plugins/modules/provision_intent.py b/plugins/modules/provision_intent.py
index 4a3c8a2286..a9231c1425 100644
--- a/plugins/modules/provision_intent.py
+++ b/plugins/modules/provision_intent.py
@@ -246,7 +246,8 @@ def get_dev_type(self):
        dev_response = self.dnac_apply['exec'](
            family="devices",
            function='get_network_device_by_ip',
-           params={"ip_address": self.validated_config[0]["management_ip_address"]}
+           params={"ip_address": self.validated_config[0]["management_ip_address"]},
+           op_modifies=True
        )

        dev_dict = dev_response.get("response")
@@ -282,6 +283,7 @@ def get_task_status(self, task_id=None):
                family="task",
                function='get_task_by_id',
                params=params,
+               op_modifies=True
            )
            response = response.response
            if response.get('isError') or re.search(
@@ -321,6 +323,7 @@ def get_site_type(self, site_name=None):
                family="sites",
                function='get_site',
                params={"name": site_name},
+               op_modifies=True
            )
        except Exception:
            self.module.fail_json(msg="Site not found", response=[])
@@ -403,7 +406,8 @@ def get_wireless_params(self):
        response = self.dnac_apply['exec'](
            family="devices",
            function='get_network_device_by_ip',
-           params={"management_ip_address": self.validated_config[0]["management_ip_address"]}
+           params={"management_ip_address": self.validated_config[0]["management_ip_address"]},
+           op_modifies=True
        )

        wireless_params[0]["deviceName"] = response.get("response")[0].get("hostname")
diff --git a/plugins/modules/provision_workflow_manager.py b/plugins/modules/provision_workflow_manager.py
index 27ae581410..f31ad3ccfa 100644
--- a/plugins/modules/provision_workflow_manager.py
+++ b/plugins/modules/provision_workflow_manager.py
@@ -251,7 +251,8 @@ def get_dev_type(self):
        dev_response = self.dnac_apply['exec'](
            family="devices",
            function='get_network_device_by_ip',
-           params={"ip_address": self.validated_config[0]["management_ip_address"]}
+           params={"ip_address": self.validated_config[0]["management_ip_address"]},
+           op_modifies=True
        )

        self.log("The device response from 'get_network_device_by_ip' API is {0}".format(str(dev_response)), "DEBUG")
@@ -289,6 +290,7 @@ def get_task_status(self, task_id=None):
                family="task",
                function='get_task_by_id',
                params=params,
+               op_modifies=True
            )
            self.log("Response collected from 'get_task_by_id' API is {0}".format(str(response)), "DEBUG")
            response = response.response
@@ -330,6 +332,7 @@ def get_site_type(self, site_name_hierarchy=None):
                family="sites",
                function='get_site',
                params={"name": site_name_hierarchy},
+               op_modifies=True
            )
        except Exception:
            self.log("Exception occurred as \
@@ -418,7 +421,8 @@ def get_wireless_params(self):
        response = self.dnac_apply['exec'](
            family="devices",
            function='get_network_device_by_ip',
-           params={"management_ip_address": self.validated_config[0]["management_ip_address"]}
+           params={"management_ip_address": self.validated_config[0]["management_ip_address"]},
+           op_modifies=True
        )

        self.log("Response collected from 'get_network_device_by_ip' is:{0}".format(str(response)), "DEBUG")
@@ -475,17 +479,14 @@ class instance for further use.
        device_type = self.want.get("device_type")

        if device_type == "wired":
-           try:
-               status_response = self.dnac_apply['exec'](
-                   family="sda",
-                   function="get_provisioned_wired_device",
-                   op_modifies=True,
-                   params={
-                       "device_management_ip_address": self.validated_config[0]["management_ip_address"]
-                   },
-               )
-           except Exception:
-               status_response = {}
+           status_response = self.dnac_apply['exec'](
+               family="sda",
+               function="get_provisioned_wired_device",
+               op_modifies=True,
+               params={
+                   "device_management_ip_address": self.validated_config[0]["management_ip_address"]
+               },
+           )
            self.log("Wired device's status Response collected from 'get_provisioned_wired_device' API is:{0}".format(str(status_response)), "DEBUG")
            status = status_response.get("status")
            self.log("The provisioned status of the wired device is {0}".format(status), "INFO")

From 7fcbda1592fd9d8165362cfcae39cb79630efd30 Mon Sep 17 00:00:00 2001
From: Abinash
Date: Thu, 28 Mar 2024 12:16:52 +0000
Subject: [PATCH 15/18] Making OP_modifies as true

---
 playbooks/PnP.yml | 3 ---
 1 file changed, 3 deletions(-)

diff --git a/playbooks/PnP.yml b/playbooks/PnP.yml
index a20885f5a0..31cb11f9bf 100644
--- a/playbooks/PnP.yml
+++ b/playbooks/PnP.yml
@@ -109,6 +109,3 @@
        - serial_number: QD2425L8M7 #Will get deleted
        - serial_number: FTC2320E0HA #Doesn't exist in the inventory
        - serial_number: FKC2310E0HB #Doesn't exist in the inventory
-
-
-

From 5326e0a82d5c2578068a831766aa99a68b72ef0b Mon Sep 17 00:00:00 2001
From: Madhan
Date: Mon, 1 Apr 2024 22:36:51 +0530
Subject: [PATCH 16/18] Changes in workflow manager modules

---
 changelogs/changelog.yaml | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/changelogs/changelog.yaml b/changelogs/changelog.yaml
index a3408e7e54..ff57a5e643 100644
--- a/changelogs/changelog.yaml
+++ b/changelogs/changelog.yaml
@@ -849,3 +849,6 @@ releases:
       - Added attributes 'dnac_api_task_timeout' and 'dnac_task_poll_interval' in intent and workflow_manager modules.
       - inventory_workflow_manager - Added attributes 'add_user_defined_field', 'update_interface_details', 'export_device_list' and 'admin_status'
       - inventory_workflow_manager - Removed attributes 'provision_wireless_device', 'reprovision_wired_device'
+      - Added the op_modifies=True when calling SDK APIs in the workflow manager modules.
+      - Added a method to validate IP addresses.
+      - Fixed a minor issue in the site workflow manager module.
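
The minor_changes entries just added summarize the change that runs through the module patches above: every self.dnac_apply['exec'](...) lookup in the workflow manager modules now forwards op_modifies=True to the SDK wrapper. The snippet below is a minimal, self-contained sketch of that calling pattern only, not code from the collection: _fake_sdk_exec is a stand-in for the real SDK wrapper so the example runs anywhere, and the serial number is reused from playbooks/PnP.yml purely for illustration.

# Minimal, self-contained sketch (not collection code): _fake_sdk_exec stands in
# for the SDK wrapper the modules call, so the pattern can run without a Catalyst
# Center / DNA Center instance or the real SDK.
def _fake_sdk_exec(family, function, params=None, op_modifies=False):
    """Echo the call the module would hand to the SDK and return an empty response."""
    print("SDK call -> family={0!r}, function={1!r}, params={2!r}, op_modifies={3}".format(
        family, function, params, op_modifies))
    return {"response": []}


dnac_apply = {"exec": _fake_sdk_exec}

# The pattern the patches standardize on: the lookup passes op_modifies=True explicitly.
device_response = dnac_apply["exec"](
    family="device_onboarding_pnp",
    function="get_device_list",
    params={"serial_number": "QD2425L8M7"},  # serial number reused from playbooks/PnP.yml
    op_modifies=True,
)

The same shape repeats across the pnp, provision and template modules in the hunks above; only the family, function and params arguments change.
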
From 4bcde48af071730dff9fcb39ed7034da63b1969e Mon Sep 17 00:00:00 2001
From: Madhan
Date: Wed, 3 Apr 2024 07:18:49 +0530
Subject: [PATCH 17/18] Updating galaxy and changelog files

---
 changelogs/changelog.yaml | 5 +++++
 galaxy.yml                | 2 +-
 2 files changed, 6 insertions(+), 1 deletion(-)

diff --git a/changelogs/changelog.yaml b/changelogs/changelog.yaml
index ff57a5e643..44e14aebd6 100644
--- a/changelogs/changelog.yaml
+++ b/changelogs/changelog.yaml
@@ -849,6 +849,11 @@ releases:
       - Added attributes 'dnac_api_task_timeout' and 'dnac_task_poll_interval' in intent and workflow_manager modules.
       - inventory_workflow_manager - Added attributes 'add_user_defined_field', 'update_interface_details', 'export_device_list' and 'admin_status'
       - inventory_workflow_manager - Removed attributes 'provision_wireless_device', 'reprovision_wired_device'
+  6.13.2:
+    release_date: "2024-04-03"
+    changes:
+      release_summary: Enhancements in discovery, site, swim and inventory workflow manager modules.
+      minor_changes:
       - Added the op_modifies=True when calling SDK APIs in the workflow manager modules.
       - Added a method to validate IP addresses.
       - Fixed a minor issue in the site workflow manager module.
diff --git a/galaxy.yml b/galaxy.yml
index 7faebedd5a..90484a1cfd 100644
--- a/galaxy.yml
+++ b/galaxy.yml
@@ -1,7 +1,7 @@
 ---
 namespace: cisco
 name: dnac
-version: 6.13.1
+version: 6.13.2
 readme: README.md
 authors:
   - Rafael Campos

From 73e92a26b908bc521440151692f7ea7ed43b2347 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Francisco=20Mu=C3=B1oz?=
Date: Wed, 3 Apr 2024 11:58:51 -0600
Subject: [PATCH 18/18] - Updating galaxy.yml ansible.utils dependencies.

---
 changelogs/changelog.yaml | 1 +
 galaxy.yml                | 2 +-
 2 files changed, 2 insertions(+), 1 deletion(-)

diff --git a/changelogs/changelog.yaml b/changelogs/changelog.yaml
index 44e14aebd6..2720b6ee3b 100644
--- a/changelogs/changelog.yaml
+++ b/changelogs/changelog.yaml
@@ -857,3 +857,4 @@ releases:
       - Added the op_modifies=True when calling SDK APIs in the workflow manager modules.
       - Added a method to validate IP addresses.
       - Fixed a minor issue in the site workflow manager module.
+      - Updating galaxy.yml ansible.utils dependencies.
diff --git a/galaxy.yml b/galaxy.yml
index 90484a1cfd..2e40b75e16 100644
--- a/galaxy.yml
+++ b/galaxy.yml
@@ -27,7 +27,7 @@ tags:
   - networking
   - sdn
 dependencies:
-  ansible.utils: ">=2.0.0,<4.0"
+  ansible.utils: ">=2.0.0,<5.0"
 repository: https://github.com/cisco-en-programmability/dnacenter-ansible
 documentation: https://cisco-en-programmability.github.io/dnacenter-ansible/
 homepage: https://github.com/cisco-en-programmability/dnacenter-ansible
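
One changelog entry in the 6.13.2 release notes, "Added a method to validate IP addresses", is not accompanied by code anywhere in this patch series. The sketch below is a hypothetical illustration only, built on the Python standard library; the name is_valid_ipv4_address and its exact behaviour are assumptions, not the collection's actual helper.

# Hypothetical sketch: the changelog line "Added a method to validate IP addresses"
# does not include the code, so this shows one plausible shape of such a check.
# The function name and behaviour are assumptions, not the collection's helper.
import ipaddress


def is_valid_ipv4_address(ip_address):
    """Return True if ip_address parses as a valid IPv4 address, else False."""
    try:
        ipaddress.IPv4Address(ip_address)
    except (ipaddress.AddressValueError, ValueError):
        return False
    return True


if __name__ == "__main__":
    # e.g. the management IP a provision playbook would pass in
    print(is_valid_ipv4_address("204.192.3.40"))   # True
    print(is_valid_ipv4_address("204.192.3.400"))  # False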